summaryrefslogtreecommitdiffstats
path: root/ansible_collections/cisco/dnac
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-18 05:52:22 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-18 05:52:22 +0000
commit38b7c80217c4e72b1d8988eb1e60bb6e77334114 (patch)
tree356e9fd3762877d07cde52d21e77070aeff7e789 /ansible_collections/cisco/dnac
parentAdding upstream version 7.7.0+dfsg. (diff)
downloadansible-38b7c80217c4e72b1d8988eb1e60bb6e77334114.tar.xz
ansible-38b7c80217c4e72b1d8988eb1e60bb6e77334114.zip
Adding upstream version 9.4.0+dfsg.upstream/9.4.0+dfsg
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'ansible_collections/cisco/dnac')
-rw-r--r--ansible_collections/cisco/dnac/.github/workflows/sanity_tests.yml3
-rw-r--r--ansible_collections/cisco/dnac/.gitignore3
-rw-r--r--ansible_collections/cisco/dnac/FILES.json367
-rw-r--r--ansible_collections/cisco/dnac/MANIFEST.json16
-rw-r--r--ansible_collections/cisco/dnac/README.md2
-rw-r--r--ansible_collections/cisco/dnac/changelogs/changelog.yaml161
-rw-r--r--ansible_collections/cisco/dnac/docs/conf.py3
-rw-r--r--ansible_collections/cisco/dnac/meta/runtime.yml2
-rw-r--r--ansible_collections/cisco/dnac/playbooks/PnP.yml111
-rw-r--r--ansible_collections/cisco/dnac/playbooks/PnP_Workflow_Manager_Playbook.yml111
-rw-r--r--ansible_collections/cisco/dnac/playbooks/credentials.template5
-rw-r--r--ansible_collections/cisco/dnac/playbooks/credentials.yml10
-rw-r--r--ansible_collections/cisco/dnac/playbooks/device_credential_intent.yml119
-rw-r--r--ansible_collections/cisco/dnac/playbooks/device_credential_workflow_manager.yml103
-rw-r--r--ansible_collections/cisco/dnac/playbooks/device_details.template69
-rw-r--r--ansible_collections/cisco/dnac/playbooks/device_details.yml54
-rw-r--r--ansible_collections/cisco/dnac/playbooks/device_provision.yml37
-rw-r--r--ansible_collections/cisco/dnac/playbooks/device_provision_workflow.yml38
-rw-r--r--ansible_collections/cisco/dnac/playbooks/discovery.yml14
-rw-r--r--ansible_collections/cisco/dnac/playbooks/discovery_intent.yml197
-rw-r--r--ansible_collections/cisco/dnac/playbooks/discovery_workflow_manager.yml197
-rw-r--r--ansible_collections/cisco/dnac/playbooks/global_credentials.yml6
-rw-r--r--ansible_collections/cisco/dnac/playbooks/inventory_device.yml53
-rw-r--r--ansible_collections/cisco/dnac/playbooks/inventory_workflow_manager.yml67
-rw-r--r--ansible_collections/cisco/dnac/playbooks/network_settings_intent.yml112
-rw-r--r--ansible_collections/cisco/dnac/playbooks/network_settings_workflow_manager.yml108
-rw-r--r--ansible_collections/cisco/dnac/playbooks/site_intent.yml32
-rw-r--r--ansible_collections/cisco/dnac/playbooks/site_workflow_manager.yml39
-rw-r--r--ansible_collections/cisco/dnac/playbooks/swim_import_local.yml22
-rw-r--r--ansible_collections/cisco/dnac/playbooks/swim_intent.yml43
-rw-r--r--ansible_collections/cisco/dnac/playbooks/swim_workflow_manager.yml54
-rw-r--r--ansible_collections/cisco/dnac/playbooks/template_pnp_intent.yml37
-rw-r--r--ansible_collections/cisco/dnac/playbooks/template_workflow_manager.yml34
-rw-r--r--ansible_collections/cisco/dnac/playbooks/test_swim_module.yml51
-rw-r--r--ansible_collections/cisco/dnac/playbooks/tests/test-1592357065255.csv3
-rw-r--r--ansible_collections/cisco/dnac/playbooks/user_info.yml18
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/cli_credential.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/http_read_credential.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/http_write_credential.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/netconf_credential.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/pnp_device_claim_to_site.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/role_permissions_info.py4
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/roles_info.py4
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/snmpv2_read_community_credential.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/snmpv2_write_community_credential.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/snmpv3_credential.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/swim_import_local.py10
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/tag_member.py10
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/user.py12
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/user_info.py4
-rw-r--r--ansible_collections/cisco/dnac/plugins/action/users_external_servers_info.py4
-rw-r--r--ansible_collections/cisco/dnac/plugins/doc_fragments/intent_params.py54
-rw-r--r--ansible_collections/cisco/dnac/plugins/doc_fragments/workflow_manager_params.py117
-rw-r--r--ansible_collections/cisco/dnac/plugins/module_utils/dnac.py719
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/device_credential_intent.py2618
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/device_credential_workflow_manager.py2617
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/discovery_intent.py1713
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/discovery_workflow_manager.py1713
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/inventory_intent.py3644
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/inventory_workflow_manager.py3638
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/network_settings_intent.py2225
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/network_settings_workflow_manager.py2210
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/pnp_intent.py1882
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/pnp_workflow_manager.py1301
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/provision_intent.py620
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/provision_workflow_manager.py737
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/role_permissions_info.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/roles_info.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_intent.py902
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_workflow_manager.py1087
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/swim_intent.py1871
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/swim_workflow_manager.py1896
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/tag_member.py1
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/template_intent.py3033
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/template_workflow_manager.py2885
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/user.py4
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/user_info.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/users_external_servers_info.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/plugin_utils/dnac.py2
-rw-r--r--ansible_collections/cisco/dnac/tests/sanity/ignore-2.10.txt20
-rw-r--r--ansible_collections/cisco/dnac/tests/sanity/ignore-2.11.txt20
-rw-r--r--ansible_collections/cisco/dnac/tests/sanity/ignore-2.12.txt22
-rw-r--r--ansible_collections/cisco/dnac/tests/sanity/ignore-2.13.txt12
-rw-r--r--ansible_collections/cisco/dnac/tests/sanity/ignore-2.14.txt12
-rw-r--r--ansible_collections/cisco/dnac/tests/sanity/ignore-2.15.txt37
-rw-r--r--ansible_collections/cisco/dnac/tests/sanity/ignore-2.9.txt20
-rw-r--r--ansible_collections/cisco/dnac/tests/unit/modules/dnac/dnac_module.py137
-rw-r--r--ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_discovery_intent.py170
-rw-r--r--ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_pnp_intent.py113
-rw-r--r--ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_site_intent.py100
-rw-r--r--ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_swim_intent.py134
-rw-r--r--ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_template_intent.py99
92 files changed, 38389 insertions, 2372 deletions
diff --git a/ansible_collections/cisco/dnac/.github/workflows/sanity_tests.yml b/ansible_collections/cisco/dnac/.github/workflows/sanity_tests.yml
index 2f07550bc..32d46d478 100644
--- a/ansible_collections/cisco/dnac/.github/workflows/sanity_tests.yml
+++ b/ansible_collections/cisco/dnac/.github/workflows/sanity_tests.yml
@@ -14,9 +14,8 @@ jobs:
strategy:
matrix:
ansible:
- - stable-2.12
- - stable-2.13
- stable-2.14
+ - stable-2.15
- devel
runs-on: ubuntu-20.04
steps:
diff --git a/ansible_collections/cisco/dnac/.gitignore b/ansible_collections/cisco/dnac/.gitignore
index 192206246..a14cf3596 100644
--- a/ansible_collections/cisco/dnac/.gitignore
+++ b/ansible_collections/cisco/dnac/.gitignore
@@ -18,4 +18,5 @@ docs/plugins/
docs/_build/
changelogs/.plugin-cache.yaml
playbooks/credentials.yml
-.DS_Store \ No newline at end of file
+playbooks/device_details.template
+.DS_Store
diff --git a/ansible_collections/cisco/dnac/FILES.json b/ansible_collections/cisco/dnac/FILES.json
index ac121960d..3986f5776 100644
--- a/ansible_collections/cisco/dnac/FILES.json
+++ b/ansible_collections/cisco/dnac/FILES.json
@@ -43,6 +43,13 @@
"format": 1
},
{
+ "name": "plugins/doc_fragments/workflow_manager_params.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "28bcde33cf2bcc2353053899e2252aa1a4f69185f3e33d4e9ac24b38bb5b3960",
+ "format": 1
+ },
+ {
"name": "plugins/doc_fragments/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -60,7 +67,7 @@
"name": "plugins/doc_fragments/intent_params.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "07a9cf86b25669319b7c2876b33415a9f44e1f103927afdcca31064f21a23156",
+ "chksum_sha256": "d73ef79f87c387fe05e272bf33c9a5ccf77ed4cfb1d1c0c2d92fa1414d59be05",
"format": 1
},
{
@@ -88,7 +95,7 @@
"name": "plugins/action/tag_member.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3d27dab6d8a2b0b883879237b7ddd3cacccb0877fe57e4d31b56b34ec58ef13b",
+ "chksum_sha256": "59607962abe1c2545059d5f932fc3a4a3c4ccd1de9b564da7343a85949ded82c",
"format": 1
},
{
@@ -179,7 +186,7 @@
"name": "plugins/action/snmpv2_read_community_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7ede14a53420c8775bba0e65e8ab4b8d90806767eed06f53364aa4da2bbe0955",
+ "chksum_sha256": "04e5b74ac64d19bf858a84c4d5befda887a299c3ed56a413281531ebd746e0a7",
"format": 1
},
{
@@ -235,7 +242,7 @@
"name": "plugins/action/roles_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6a21edc9c6caa7789a63d76bf2d404c7315d1014b881fabc6567a6835108c63e",
+ "chksum_sha256": "a4420d31461cea8c994edb1e976072c3ccca1be6d66c2bb7f2eb4678799bcdb1",
"format": 1
},
{
@@ -312,7 +319,7 @@
"name": "plugins/action/user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4f3d64bc7a43c504e148f1c739fa892a0ba4546b72ad2dc8ef4e3cba6a4a4400",
+ "chksum_sha256": "e4ad8c0110b4a2ce7ac3b9a3f87789e17e787641e760bd29f6d6c511a64b7b34",
"format": 1
},
{
@@ -893,7 +900,7 @@
"name": "plugins/action/snmpv2_write_community_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9833fe35d6e3ebedc6467283ab3341d350c7056e222ca7c67462f7047bf736f9",
+ "chksum_sha256": "2c391e722933e237cd7567955e16a1285e7d0e77b3b6daf30f9f586cec053675",
"format": 1
},
{
@@ -1089,7 +1096,7 @@
"name": "plugins/action/pnp_device_claim_to_site.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4b3bc80998516cbfe7aeca7aae70da2c44fc31f6c040f94f4cb94cf9bd821b9f",
+ "chksum_sha256": "8825b8f2fc53aa0593a593862f62c60f139ef02c5ae29ae7bd179049e7b7b3fc",
"format": 1
},
{
@@ -1334,7 +1341,7 @@
"name": "plugins/action/http_read_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aca30deb8e5335cfd932fed68b30ef559ec2a66a3e9d8876cf86d533ef52856b",
+ "chksum_sha256": "ea4512b9a8e6dfe8fed6788ec7c94c5c89ac2dca5d9fb291e88697c32dbd4bd8",
"format": 1
},
{
@@ -1348,7 +1355,7 @@
"name": "plugins/action/swim_import_local.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8dd1c5d02ff6dc70dff6724e4fa62f657faf4654660e9e8f487eefe7ed725976",
+ "chksum_sha256": "cd8697519e27bd68e4ea9c3e932ec126715606e5db5708019482f43926e0c56a",
"format": 1
},
{
@@ -1474,7 +1481,7 @@
"name": "plugins/action/cli_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "76be1e7004db40898fde05224984a13b1401d0a1f987713c0aabeb57b6a814b0",
+ "chksum_sha256": "56d1d6cb86590b4d45b0e1b9fbc573065d1061b1a06e1da9bdad9096bfbb1df7",
"format": 1
},
{
@@ -1495,7 +1502,7 @@
"name": "plugins/action/role_permissions_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2595e8bca41ff916f1aacae4ed78da12b05dab3047d532799a023eecc18f7c2d",
+ "chksum_sha256": "207bdf932df8a11ed1cbe5a7ebe0eee0fd2874f65fbd10b826d1d8d62f0e0ddf",
"format": 1
},
{
@@ -1726,7 +1733,7 @@
"name": "plugins/action/http_write_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "877aea365a574924a1d120ac3ab63fdb0795d35e7edce33b431b3f45be7e6a6c",
+ "chksum_sha256": "c91fe4cdf627f8dba64a72176fd4a76d8628db887fa5208a3ea5db9179c0fb10",
"format": 1
},
{
@@ -1754,7 +1761,7 @@
"name": "plugins/action/users_external_servers_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "60c1e3c93d50443896c04b4c81306b1601d82256d123f739b1dbedd5dcd14a7d",
+ "chksum_sha256": "845f4a98fa6dbec0b93c620bc9b63f1b8ae8db3a5b168a455843ee71f19b1c31",
"format": 1
},
{
@@ -1992,7 +1999,7 @@
"name": "plugins/action/user_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4968f5e61b77eb812031a4cceff93f8fc46e270ec506962d1c710b6cc7c3c8d4",
+ "chksum_sha256": "5381b94a7d702556e79ef19efd80addf139ea41fcb38394389a14197b763c085",
"format": 1
},
{
@@ -2342,7 +2349,7 @@
"name": "plugins/action/snmpv3_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a066b234e198d4772b9c71b8c713c4eed01d1a977d6b7a11064d275eb46d4510",
+ "chksum_sha256": "631335eb762e3905203e5ad892b569e356374bd3444b121277f7c7aa4cf38ef7",
"format": 1
},
{
@@ -2489,7 +2496,7 @@
"name": "plugins/action/netconf_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "763e64a1560811818413eec8559be314b00712433245a87535754944e75fa67f",
+ "chksum_sha256": "8653f7f42fd41f7456cd998782e490b36047ce87e184517d22490395717ddd49",
"format": 1
},
{
@@ -2783,7 +2790,7 @@
"name": "plugins/plugin_utils/dnac.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6f483a8fb86778620524a8bb49f57451b97b828a08999cc24ab8b41e7af08290",
+ "chksum_sha256": "10337ed46f1639cf19961bef89742250412ea554529a33f67812b5264d29aaa3",
"format": 1
},
{
@@ -2811,7 +2818,7 @@
"name": "plugins/module_utils/dnac.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f0fda95080c0aa616a8e2b1a383ff76d337801c753d1ec4ad4e22bdb54a9a187",
+ "chksum_sha256": "d1e4ec066734657d8a31d57fdf241dfb7b21e88d042db1fea1f65f2f09b7ff24",
"format": 1
},
{
@@ -2825,7 +2832,7 @@
"name": "plugins/modules/tag_member.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8a2bd82e30a7de013e92d449db152ebe63ea43cd1e9677539f97494f08a2d4f0",
+ "chksum_sha256": "203c909628aff2179480a93f6e37ab0a16e075db1c288c024e61455242e7dc81",
"format": 1
},
{
@@ -2972,7 +2979,7 @@
"name": "plugins/modules/roles_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fdb38fecd2587151f93ad5fce2508aea059b3686b4e5ea671591c6264a9677bd",
+ "chksum_sha256": "d62bd99d36a17041da882dcfb46a8c59b9394bf6c794b93d8e63d1a9777f5fcb",
"format": 1
},
{
@@ -3049,7 +3056,7 @@
"name": "plugins/modules/user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "955f026808f77c135b3e8d223b06996852373f610dca0a840827d4b65d934107",
+ "chksum_sha256": "15816f3c76bdd77734d55c36d2aaf6af8894d63e5191e21f78e1e9cfe4b8a207",
"format": 1
},
{
@@ -3193,6 +3200,13 @@
"format": 1
},
{
+ "name": "plugins/modules/pnp_workflow_manager.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "aa8f6e21144259fab986674b6c92acf78b620297976704f4a93b43a265b67f08",
+ "format": 1
+ },
+ {
"name": "plugins/modules/global_pool.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -3270,6 +3284,13 @@
"format": 1
},
{
+ "name": "plugins/modules/swim_workflow_manager.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c1e32599b2c25efd5a1be3e66e8ea1bf0b6871218bde74b2e4e828c67d1bf994",
+ "format": 1
+ },
+ {
"name": "plugins/modules/execute_suggested_actions_commands.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -3284,6 +3305,13 @@
"format": 1
},
{
+ "name": "plugins/modules/network_settings_workflow_manager.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c0a00e9cece5905436095add9b58a97e5292f9f941df31af8bd5f77e5a665352",
+ "format": 1
+ },
+ {
"name": "plugins/modules/wireless_provision_device_update.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -3385,7 +3413,7 @@
"name": "plugins/modules/pnp_intent.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2cedaf6479957ce66dbd5ed51d3ece54b5b902dd1dc1b793f6c30f38bd473ba5",
+ "chksum_sha256": "46895eac51c54b6786d2d15fabe55a7fe4c38db13f6be4701876ad0db8ca1693",
"format": 1
},
{
@@ -3522,6 +3550,13 @@
"format": 1
},
{
+ "name": "plugins/modules/provision_intent.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1edf799c5bace445ba47dd5ab7edac249819cded67c0c02c86fec3c6573aaf47",
+ "format": 1
+ },
+ {
"name": "plugins/modules/pnp_workflow_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -3676,6 +3711,13 @@
"format": 1
},
{
+ "name": "plugins/modules/site_workflow_manager.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d691b4d29437329a12425232c52a9027819b496ccd47e80e19a44fffcbdafc42",
+ "format": 1
+ },
+ {
"name": "plugins/modules/global_credential_delete.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -3760,6 +3802,13 @@
"format": 1
},
{
+ "name": "plugins/modules/device_credential_workflow_manager.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f2a4e13de9da183ced28ef4ff11c9036c76fcd474b760af56f4d40caa449f16d",
+ "format": 1
+ },
+ {
"name": "plugins/modules/network_device_polling_interval_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -4068,10 +4117,17 @@
"format": 1
},
{
+ "name": "plugins/modules/device_credential_intent.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "66fa8406e7af3248eef9be43a7e2ce267d81eb478ea00755b9531461aca0d961",
+ "format": 1
+ },
+ {
"name": "plugins/modules/site_intent.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2f0da836a0e59bc71b713922eccb0c31473aaf6222ad5d9df8f7d76ee975f27a",
+ "chksum_sha256": "81af801ed37704ab603d8d0531a78bef58a8ca2994e4ef0f09d6ab6fd88e97a5",
"format": 1
},
{
@@ -4159,6 +4215,13 @@
"format": 1
},
{
+ "name": "plugins/modules/discovery_intent.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5d4d5d62ed389c1286fcbc13ecbb47790cff49e0c7ae12b8ae0dd313963bfd7e",
+ "format": 1
+ },
+ {
"name": "plugins/modules/command_runner_run_command.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -4180,6 +4243,13 @@
"format": 1
},
{
+ "name": "plugins/modules/discovery_workflow_manager.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2e4e1f1c128ca5334ed627854e7af56a330e09f7fa7bfd2e20605ad215879e25",
+ "format": 1
+ },
+ {
"name": "plugins/modules/event_syslog_config.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -4253,7 +4323,7 @@
"name": "plugins/modules/role_permissions_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8c8debde7e41f126c3114fe53b7fc7fa3dc95fad19e6446151e26194fe8d8c8b",
+ "chksum_sha256": "58174ea096129749a9c9cdd3659fd71434154e4fb988ae66f556835b286241cf",
"format": 1
},
{
@@ -4432,6 +4502,13 @@
"format": 1
},
{
+ "name": "plugins/modules/template_workflow_manager.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9caa9ab726bc593d6f485fbc3ef43c41b614feb8328f91cac1d8367be9fdc49d",
+ "format": 1
+ },
+ {
"name": "plugins/modules/discovery_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -4512,7 +4589,7 @@
"name": "plugins/modules/users_external_servers_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c3b4ddba6efb41bd6152963541458bf35ab700a2db649de9cddc7f7a37539c0c",
+ "chksum_sha256": "b7276641ce82a3f1b92e1411603569c75668d1693e3d3c2af4cdccf4ffc594e4",
"format": 1
},
{
@@ -4740,6 +4817,13 @@
"format": 1
},
{
+ "name": "plugins/modules/inventory_workflow_manager.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "902aaf3d5251b10cd002f1f24710bd0fd076d5caa203fd64966bbbdd14274d2e",
+ "format": 1
+ },
+ {
"name": "plugins/modules/global_credential_v2_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -4757,7 +4841,7 @@
"name": "plugins/modules/user_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "596af1aa3c31150551f13f79ab496008d0d82721d1eb4b656befcd241a0a62fc",
+ "chksum_sha256": "7d6202b171f91705f0f0b78b0f04293510e3964a05e667a107847d3a26b2b2fe",
"format": 1
},
{
@@ -5051,7 +5135,7 @@
"name": "plugins/modules/swim_intent.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9aaa06f3e35938360e68e7c15080dd12e3a57c11c3ab2f563daae1af62e99a6c",
+ "chksum_sha256": "a8b6020f943c3d565d60d2644acd609a03aebe0c4cd8b08799c09c9f13b9745b",
"format": 1
},
{
@@ -5279,6 +5363,13 @@
"format": 1
},
{
+ "name": "plugins/modules/provision_workflow_manager.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ea01d7479f375bd14749bd5b429060015d6749db77c8cdabce553102a87d0001",
+ "format": 1
+ },
+ {
"name": "plugins/modules/interface_network_device_range_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -5286,6 +5377,13 @@
"format": 1
},
{
+ "name": "plugins/modules/inventory_intent.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3e32ad491d38e58f18dc8f8f91fa6ba179a66486560b0af6b478b77ac7a0d17d",
+ "format": 1
+ },
+ {
"name": "plugins/modules/compliance_device_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -5366,7 +5464,7 @@
"name": "plugins/modules/template_intent.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a34cacbdc1dc8e057f6376550bd508dd14e05a11fbb07d06684550acf4d0c889",
+ "chksum_sha256": "90ad608b3f5bc90983bc7bc913a7faac8179061d5f16d8375d43db5fa8e1129e",
"format": 1
},
{
@@ -5454,6 +5552,13 @@
"format": 1
},
{
+ "name": "plugins/modules/network_settings_intent.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "85003b655194a3dfdcd2fe1e7780475d8f0d75f602325be07fbc6fd8b5b71988",
+ "format": 1
+ },
+ {
"name": "plugins/modules/itsm_integration_events_retry.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -5576,21 +5681,21 @@
"name": "tests/unit/modules/dnac/dnac_module.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "53567555cf827e4b33225ff75bf0e9e28fe7cc538dd07a86db5e25f030ecd6f3",
+ "chksum_sha256": "9354c0be1ebdff7ca3a157bbef53419dc7e10aa99f63224a39b50abdd1e4e424",
"format": 1
},
{
"name": "tests/unit/modules/dnac/test_site_intent.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e4efb20519d92b39f4d1091457a0ee5681b2137e73277e3a617af61a985acef4",
+ "chksum_sha256": "6b9dfa499cdb0d64d7d4414e98ea1dbfc0384ba7ba355afe7bd3f3c9f081021b",
"format": 1
},
{
"name": "tests/unit/modules/dnac/test_template_intent.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7a8e1e25087ebc5b2edd58533a4ee24db2e77cc89bfefb8c28cb6bb0079a667d",
+ "chksum_sha256": "fcd5ca0b6c1ffc6542015e38fad6288b58531fbba2ade9fdc80b7df7fb55522e",
"format": 1
},
{
@@ -5604,7 +5709,14 @@
"name": "tests/unit/modules/dnac/test_swim_intent.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "af4d5f56d161927261cf68132538567b797f863faeb1660fa27cfb08d24fa525",
+ "chksum_sha256": "0f4d8453fb04ef7e46f4ba48546d80bc0fb1e71d7f128a3b8292017ca94d7247",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/modules/dnac/test_discovery_intent.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "054fd5da3991950db79c7a3f7d530ae834968f871b559b55b11d16780f0fcc92",
"format": 1
},
{
@@ -5646,7 +5758,7 @@
"name": "tests/unit/modules/dnac/test_pnp_intent.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c5ee33d3e95872eb34e8edcb96fe864d517531a4aebe1024f38ffe90fa701a22",
+ "chksum_sha256": "be8d988a78e2240988de5892dedd9aa6ab3cf8773ab056228c71fb7e80d9f4fc",
"format": 1
},
{
@@ -5660,49 +5772,49 @@
"name": "tests/sanity/ignore-2.14.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3b4d18e01e2f9751bea0b06b4767bea1020935e9a790d0e2d10ed0eb94ea95fd",
+ "chksum_sha256": "6e77127e949b5b006b39726f5131bab9f0155a94a51c63b0f4f75d46194abf63",
"format": 1
},
{
"name": "tests/sanity/ignore-2.15.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8b7f6f13bb03172a3f52c77fea1438fa0496ee0f2f28d911efe7fbe01157cedb",
+ "chksum_sha256": "6e77127e949b5b006b39726f5131bab9f0155a94a51c63b0f4f75d46194abf63",
"format": 1
},
{
"name": "tests/sanity/ignore-2.11.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6cf1d519778d972db4420a14be5e5711bd089185ffd2826e3edeb824d6eef1b0",
+ "chksum_sha256": "4fcac0d5107e0b7315afdb8987dcee501dc86636551b08ae1a8c4ba5bc63d39b",
"format": 1
},
{
"name": "tests/sanity/ignore-2.10.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d4ab069bf1409c5942cb673812da6bbf6611578eefe410e4ad0561d8f42f6bba",
+ "chksum_sha256": "3c4651e8b4cdfa9af71c2fc40a99ea7ab83fc4b90a5b353f5f4bd6315ca77b49",
"format": 1
},
{
"name": "tests/sanity/ignore-2.12.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "413c41b8214cecc32374e4a2cddc8f2ee4a92bdbe3ed9fae886157fb665171b0",
+ "chksum_sha256": "29d8543143d22a474e2851734aaa32b46770150b47be01d0d4e5fa2807006b68",
"format": 1
},
{
"name": "tests/sanity/ignore-2.13.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3b4d18e01e2f9751bea0b06b4767bea1020935e9a790d0e2d10ed0eb94ea95fd",
+ "chksum_sha256": "6e77127e949b5b006b39726f5131bab9f0155a94a51c63b0f4f75d46194abf63",
"format": 1
},
{
"name": "tests/sanity/ignore-2.9.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d4ab069bf1409c5942cb673812da6bbf6611578eefe410e4ad0561d8f42f6bba",
+ "chksum_sha256": "3c4651e8b4cdfa9af71c2fc40a99ea7ab83fc4b90a5b353f5f4bd6315ca77b49",
"format": 1
},
{
@@ -5716,7 +5828,7 @@
"name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "48a2f61c72296aaa190a3ad670b0a5759bd047262b59b3d870e986b05878d87d",
+ "chksum_sha256": "2e182a22d58fcfbca71fbef1992147e1cd87fc67648750e62b5955ec72521520",
"format": 1
},
{
@@ -5734,6 +5846,20 @@
"format": 1
},
{
+ "name": "playbooks/site_workflow_manager.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ade7b717ade350cadefeff3071a9403346dfaab40194c929157b4fc63af0b8a9",
+ "format": 1
+ },
+ {
+ "name": "playbooks/PnP_Workflow_Manager_Playbook.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "376cbea818770893c8c6eb980dcc8a64817ed71fc89808e1877cbbb0d262e810",
+ "format": 1
+ },
+ {
"name": "playbooks/download_device_cmd_runner_output.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -5758,7 +5884,14 @@
"name": "playbooks/template_pnp_intent.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d1926a010cdc414d3ccab5a2e2a35175e09ccfa60ebf8e82493383b5b37e9396",
+ "chksum_sha256": "9fe031eff47ca38270df4ff20de531a150ec74e316dbe1d532f3bb2ad2b93d82",
+ "format": 1
+ },
+ {
+ "name": "playbooks/device_provision_workflow.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "28bd738eea40bad4819ae44803c03830e448ae3b750d795572e3872b42bb43cd",
"format": 1
},
{
@@ -5769,6 +5902,13 @@
"format": 1
},
{
+ "name": "playbooks/discovery_workflow_manager.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "30694e8124cdeb36a2507b8f076dfd6635352d8eaaa5d742bb9a0a6944588e64",
+ "format": 1
+ },
+ {
"name": "playbooks/wireless_enterprise_ssid.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -5779,7 +5919,7 @@
"name": "playbooks/discovery.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10437949e7c63f902ba760727984f08f28752186dff7a73390fafd4a433f42d7",
+ "chksum_sha256": "d24603f24015ad2d877e3692c1b53a430786c3509e735afacc22d2f1d39fa211",
"format": 1
},
{
@@ -5790,10 +5930,24 @@
"format": 1
},
{
- "name": "playbooks/device_details.yml",
+ "name": "playbooks/swim_workflow_manager.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7ca211a21a1fd2aa2e98acb6baede405567d47c162b29ec1f4576c201f2c659b",
+ "format": 1
+ },
+ {
+ "name": "playbooks/PnP.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2391ea6223b5ff5f7f6402386a44318f78ea2cb0e170494b7f30e07eb59a8709",
+ "chksum_sha256": "0ace6a2cfb7cadc96e527dea4dad9616c35769151456271248cdea3c39a6c1df",
+ "format": 1
+ },
+ {
+ "name": "playbooks/site_intent.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cf39fb8203aad21846dca9ff98a099bfa01dd52a9aa596716a42c810c06dc49a",
"format": 1
},
{
@@ -5804,6 +5958,41 @@
"format": 1
},
{
+ "name": "playbooks/inventory_workflow_manager.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e88873aa70c403e7753237e113477a7ec0fde19528fb457d20383355d360a854",
+ "format": 1
+ },
+ {
+ "name": "playbooks/device_details.template",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "92ccdcdfe15f46252c522238c97e5158d18676eccddd3a85f23a8f497b4e0c0d",
+ "format": 1
+ },
+ {
+ "name": "playbooks/inventory_device.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9602c04ad1158e2d603dcbe0839b35b1dfdc15030ce19d1d5111c3f7be70dcc1",
+ "format": 1
+ },
+ {
+ "name": "playbooks/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "playbooks/tests/test-1592357065255.csv",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9747b473294264a386ac43b9b83ea11eaf2dac5cce86ed78d971ad9aa001cef9",
+ "format": 1
+ },
+ {
"name": "playbooks/import_image_file.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -5825,6 +6014,34 @@
"format": 1
},
{
+ "name": "playbooks/device_credential_intent.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3545f9b0789095e7bfccfd748584a7a80887278794a46b862184d0b49553c80c",
+ "format": 1
+ },
+ {
+ "name": "playbooks/user_info.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a65f8048638f8594daf2c4be04daf24edc60a4a1b6b7358fe0c77e2bc2128ad5",
+ "format": 1
+ },
+ {
+ "name": "playbooks/discovery_intent.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d18a801d230750d4d0dd6feb2fadcdcca4b778186a1f386df15081d151914cfc",
+ "format": 1
+ },
+ {
+ "name": "playbooks/swim_intent.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d6facda29ca42ef073cf58d9c54ae074c8217abdc68e926dbc59da5e31c8cfab",
+ "format": 1
+ },
+ {
"name": "playbooks/application_sets.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -5835,7 +6052,7 @@
"name": "playbooks/credentials.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cb277ffd91b34ff6f2bf5f114a0f0386ae0ea4f36f8e6627438aff707a6c3b14",
+ "chksum_sha256": "a63e2b2bf00ec5abdfd77de7b4149bcfb2cba0f36b6728bc0d203d7d7f39e72f",
"format": 1
},
{
@@ -5846,6 +6063,13 @@
"format": 1
},
{
+ "name": "playbooks/swim_import_local.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b8041625998d470827195c1f805f9c8a1be3489c2beed9fbad07dbb02c9c8f60",
+ "format": 1
+ },
+ {
"name": "playbooks/tag.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -5853,6 +6077,13 @@
"format": 1
},
{
+ "name": "playbooks/device_provision.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3d0a52d4599f421cadc817a52aeb92d0116b7d888d6b9365b3b85057b1d7091b",
+ "format": 1
+ },
+ {
"name": "playbooks/image_details.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -5877,7 +6108,14 @@
"name": "playbooks/test_swim_module.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "85899c9c79904c483cde369a146a6683dfc5affe3e9ca36e9469e6e95bf224d3",
+ "chksum_sha256": "e07ea74f636b0fb255a062808c77e21502df6404217cb4edf6de6380732afac7",
+ "format": 1
+ },
+ {
+ "name": "playbooks/network_settings_workflow_manager.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c8ae63ded3e85bf002f0bb9d0561f1c58ed22d034d72271ee97642425ca07406",
"format": 1
},
{
@@ -5888,6 +6126,13 @@
"format": 1
},
{
+ "name": "playbooks/template_workflow_manager.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1097e1586a9408998c3a1d513ad70492849e26d052b20816e27ebb60b8ee1d94",
+ "format": 1
+ },
+ {
"name": "playbooks/network_device_info.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -5905,7 +6150,7 @@
"name": "playbooks/global_credentials.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6f500d4c375b20d3dc174cf6b1edd4fa529160c20922dc63ac8682da5d63d86a",
+ "chksum_sha256": "c9a55e35903f75c5afcd0d42fefe8a61fe115d3e9c7f4ade131bf9cf8a253bb9",
"format": 1
},
{
@@ -5923,6 +6168,13 @@
"format": 1
},
{
+ "name": "playbooks/network_settings_intent.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "77d20576b26b5c3d172ce085c117354960130ec7592a9df56875ab575dd9ed74",
+ "format": 1
+ },
+ {
"name": "playbooks/reserve_ip_subpool.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -5930,6 +6182,13 @@
"format": 1
},
{
+ "name": "playbooks/device_credential_workflow_manager.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "da77ff1743c952a728532bfc40984e7b3a156f06889b257fd91e8cd203f7c06b",
+ "format": 1
+ },
+ {
"name": "playbooks/sda_fabric_site.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -5947,7 +6206,7 @@
"name": "playbooks/credentials.template",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c0e5ce3459671207879fa4881889f46767ed75882b9277578606632a1c300d65",
+ "chksum_sha256": "a1331022feae8b2d74bd4ae2ccc65f98a7e5e4fdb0fd6286ed3a626de3a26603",
"format": 1
},
{
@@ -5989,7 +6248,7 @@
"name": "docs/conf.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a47bea0b843845267a40acddd4960a5c67131dc86d697a24ff7214889a617574",
+ "chksum_sha256": "27cf144a95b88a9a4f6dc5a4c584da75af99bdeee14bfada99f75aa53dd740e6",
"format": 1
},
{
@@ -6024,21 +6283,21 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3ac122d274c18214ef25b748224c3b2a55e71255f22a7d71adbc24f588ae141f",
+ "chksum_sha256": "bcad14cdf9a6044c9347400ea1cbd6432ddd38c4b5b12e78a54bca57fa96355a",
"format": 1
},
{
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c97dd8e26d758882f807a72ac06f5b5217188365327103321684999d95aba9af",
+ "chksum_sha256": "f1c14af9aa1e4bf92f028daab559d8912231284b04beeb34459404d0cc085ef1",
"format": 1
},
{
"name": ".gitignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d814d50e57d0fd49615568f9de9b71ddca4c2f84005ab8e75a38030baecbe205",
+ "chksum_sha256": "2257ec70ce9b5b47aee4c1668ee094ec6bb87856b58949eea80d2dedc75ab59b",
"format": 1
},
{
@@ -6059,7 +6318,7 @@
"name": ".github/workflows/sanity_tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ee7a36fc43cbe0b847c07915049ba841f43e40084daa6babbf518c7e8e203f68",
+ "chksum_sha256": "5f73c04a8f73f124035ead77d2dfa8418bea6af501ffc3ddaf20eb04abac3bd6",
"format": 1
},
{
diff --git a/ansible_collections/cisco/dnac/MANIFEST.json b/ansible_collections/cisco/dnac/MANIFEST.json
index 00065eeb0..4d4e0c65d 100644
--- a/ansible_collections/cisco/dnac/MANIFEST.json
+++ b/ansible_collections/cisco/dnac/MANIFEST.json
@@ -2,15 +2,21 @@
"collection_info": {
"namespace": "cisco",
"name": "dnac",
- "version": "6.7.2",
+ "version": "6.13.1",
"authors": [
"Rafael Campos <rcampos@altus.cr>",
"William Astorga <wastorga@altus.cr>",
"Jose Bogarin <jbogarin@altus.cr>",
"Bryan Vargas <bvargas@altus.cr>",
"Francisco Mu\u00f1oz <fmunoz@altus.cr>",
- "Madhan Sankaranarayanan (@madhansansel)",
- "Rishita Chowdhary (@rishitachowdhary)"
+ "Madhan Sankaranarayanan <madsanka@cisco.com>",
+ "Rishita Chowdhary (@rishitachowdhary)",
+ "Muthu Rakesh Babu <mutbabu@cisco.com>",
+ "Akash Bhaskaran <akabhask@cisco.com>",
+ "Abinash Mishra <abimishr@cisco.com>",
+ "Abhishek Maheshwari <abmahesh@cisco.com>",
+ "Phan Nguyen <phannguy@cisco.com>",
+ "Rugvedi Kapse <rukapse@cisco.com>"
],
"readme": "README.md",
"tags": [
@@ -25,7 +31,7 @@
"license": [],
"license_file": "LICENSE",
"dependencies": {
- "ansible.utils": ">=2.0.0,<3.0"
+ "ansible.utils": ">=2.0.0,<4.0"
},
"repository": "https://github.com/cisco-en-programmability/dnacenter-ansible",
"documentation": "https://cisco-en-programmability.github.io/dnacenter-ansible/",
@@ -36,7 +42,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d457a35ecab2567f175c3665b9e4d08de099f0429135a796f6a5765292ca5dab",
+ "chksum_sha256": "5e6e6d846a1d245059dfee01ac8ee7dbd3af1e69fda28f415a6d670fb8bdbd93",
"format": 1
},
"format": 1
diff --git a/ansible_collections/cisco/dnac/README.md b/ansible_collections/cisco/dnac/README.md
index 1f9e5f11f..2492d75b9 100644
--- a/ansible_collections/cisco/dnac/README.md
+++ b/ansible_collections/cisco/dnac/README.md
@@ -19,7 +19,7 @@ The following table shows the supported versions.
| 2.2.2.3 | 3.3.1 | 2.3.3 |
| 2.2.3.3 | 6.4.0 | 2.4.11 |
| 2.3.3.0 | 6.6.4 | 2.5.5 |
-| 2.3.5.3 | 6.7.2 | 2.6.0 |
+| 2.3.5.3 | 6.13.0 | 2.6.0 |
If your Ansible collection is older please consider updating it first.
diff --git a/ansible_collections/cisco/dnac/changelogs/changelog.yaml b/ansible_collections/cisco/dnac/changelogs/changelog.yaml
index e8f9f436a..a3408e7e5 100644
--- a/ansible_collections/cisco/dnac/changelogs/changelog.yaml
+++ b/ansible_collections/cisco/dnac/changelogs/changelog.yaml
@@ -689,4 +689,163 @@ releases:
changes:
release_summary: Adding documentation for 2.3.5.3 API
minor_changes:
- - Updating documentation, collection supports 2.3.5.3 DNA Center API Version. \ No newline at end of file
+ - Updating documentation, collection supports 2.3.5.3 DNA Center API Version.
+ 6.7.3:
+ release_date: "2023-06-27"
+ changes:
+ release_summary: Updating documentation and fixing global credentials.
+ minor_changes:
+ - Documentation changes in example of site_intent
+ - The global credentials create implementation fixed.
+ - Discovery playbook has no hardcoded credentials id and seed IP.
+ 6.7.4:
+ release_date: "2023-08-24"
+ changes:
+ release_summary: Updated sanity test.
+ minor_changes:
+      - Updated test/sanity and removed unnecessary files.
+ 6.7.5:
+ release_date: "2023-09-25"
+ changes:
+ release_summary: Updated different function names.
+ bugfixes:
+ - Updated from get_permissions_ap_i to get_permissions_api
+ - Updated from get_roles_ap_i to get_roles_api
+ - Updated from get_users_ap_i to get_users_api
+ - Updated from get_external_authentication_servers_ap_i to get_external_authentication_servers_api
+ 6.7.6:
+ release_date: "2023-10-13"
+ changes:
+ release_summary: Several changes to modules.
+ minor_changes:
+ - A new intent module for network settings to support Global IP Pool, Reserve IP Pool, Global servers, TimeZone, Message of the Day and telemetry servers.
+ - By inheriting DNAC base class, changes done to Swim, Template, PnP intent modules.
+ 6.8.0:
+ release_date: "2023-11-10"
+ changes:
+ release_summary: Several changes to modules.
+ minor_changes:
+ - A new intent module to perform inventory for Adding, Deleting, Resyncing, Updating Devices etc. for all types of devices.
+ - A new intent module to Create, Update and Delete Global Device Credentials and Assign Credentials to a sites.
+ - A new intent module to discover the devices.
+ - Minor changes to swim intent module.
+ 6.8.1:
+ release_date: "2023-11-20"
+ changes:
+ release_summary: Updating documentation and fixing swim issues.
+ minor_changes:
+ - Fixed issues in Swim intent module.
+ - Updated docstring in site intent and template intent modules.
+ 6.8.2:
+ release_date: "2023-12-04"
+ changes:
+ release_summary: Changing galaxy.
+ bugfixes:
+      - Adding support for ansible.utils >=3.0
+ - pnp_device_claim_to_site.py change configInfo from `list` to `dict` #135
+ 6.9.0:
+ release_date: "2023-12-05"
+ changes:
+ release_summary: Changing galaxy.
+ minor_changes:
+      - Adding support for ansible.utils ">=2.0.0,<4.0".
+ 6.10.0:
+ release_date: "2023-12-18"
+ changes:
+ release_summary: Changes in intent module to support discovery, provisioning and more operations in swim, site, template and pnp intent modules.
+ minor_changes:
+ - Changes in inventory_intent module to support Update Device role, update interface details.
+ - Changes in inventory_intent module to create and assign Global User defined field to devices, Delete Global UDF, Delete Provisioned/Unprovisioned devices.
+ - Changes in pnp_intent module to support bulk addition and bulk deletion along with variable names.
+ - Handling idempotent in delete operation in site_intent module.
+ - Adding discovery intent module to support discovering the devices.
+ - Changes in swim intent module to support Distribute and activate image by fetching device based on site and device family details
+ - Changes in site intent module to support one-shot site deletion
+ - To Support provisioning wired device, reboot AP's, export device list, delete provisioned devices.
+ - Change the variable names into snake case in all the intent modules for better readability.
+ 6.10.1:
+ release_date: "2024-01-20"
+ changes:
+ release_summary: Changes in network settings, site, discovery, inventory, swim, credential and provisioning intent modules
+ minor_changes:
+ - Introducing config_verify to verify the state operations in Catalyst Center in network settings and site intent module
+ - Changes to support inventory and provisioning intent modules
+ 6.10.2:
+ release_date: "2024-01-24"
+ changes:
+ release_summary: Set dnac log level if it is not set in the playbook.
+ minor_changes:
+ - Set dnac log level if it is not set in the playbook.
+      - Handle provisioning of a device if it does not reach the managed state for an extended period.
+      - Set the logging levels for the device credential intent module.
+ 6.10.3:
+ release_date: "2024-01-30"
+ changes:
+ release_summary: Fixing swim_import_local action.
+ minor_changes:
+ - Building additional parameters needed in sdk.
+ 6.10.4:
+ release_date: "2024-02-05"
+ changes:
+ release_summary: Enhancements in Cisco Catalyst Center logging
+ minor_changes:
+ - Introducing log levels and log file path
+ - Updated Documentation in template intent module
+ - Enhancements in device_credential, inventory, discovery and template intent modules.
+ 6.11.0:
+ release_date: "2024-02-17"
+ changes:
+ release_summary: Adding new workflow manager modules in Cisco Catalyst Center
+ minor_changes:
+ - The 'site_workflow_manager' module orchestrates the creation of sites within the Cisco Catalyst Center, encompassing areas such as buildings and floors. It ensures necessary pre-checks are performed and allows for subsequent updates to these sites. Additionally, the module facilitates the deletion of specific sites using the site and parent names. A feature to delete all child sites by specifying only the parent site name is also available.
+ - The 'swim_workflow_manager' module handles the importation of SWIM images into the Cisco Catalyst Center, utilizing either a remote URL or a local image file path. It provides functionality for tagging and untagging SWIM images based on device family, role, and site. The module ensures the successful importation of images for distribution and activation on devices within the Cisco Catalyst Center. It also allows for the retrieval of a list of devices tied to a specific site, device family, and device role, facilitating various SWIM operations such as importing, tagging, distribution, and activation.
+ - The 'network_settings_workflow_manager' module manages global IP pool allocation, reserved sub pool assignment, and network function administration, including DHCP, Syslog, SNMP, NTP, Network AAA, client and endpoint AAA, and DNS servers, ensuring seamless operation at site and global levels in the Cisco Catalyst Center.
+ - The 'device_credential_workflow_manager' module oversees the management of global device credentials, including CLI, SNMPv2C read, SNMPv2C write, SNMPv3, HTTP(s) read, and HTTP(s) write. It facilitates the assignment of these credentials to specific sites, ensuring secure and efficient access to network devices across the infrastructure in the Cisco Catalyst Center.
+ - The 'inventory_workflow_manager' module is responsible for the actions that can be performed over devices which includes adding, deleting, resyncing, updating device details, device credentials, common info etc. for all types of devices - network device, compute device, meraki device, firepower management system device and third party devices. Exporting devices details and device credentials details into the CSV file, doing wired/wireless device provisioning, reboot AP devices, resyncing of device etc. Also we can update device just by giving the parameter that need to be changes on single or bulk devices and rest required parameters will be fetched from Cisco Catalyst Center and prepopulate it before triggering the update API.
+ - The 'pnp_workflow_manager' module helps in adding a device or adding devices in bulk to PnP database of the Cisco Catalyst Center. Post addition, device can be claimed to a site along with template provision and image upgrade. Along with that devices can be deleted from the PnP database.
+ - The 'discovery_workflow_manager' module streamlines the discovery of devices using various methods including single IP, IP range, multi-range, CDP, CIDR, and LLDP. It also offers the ability to clear out discoveries by deleting them from the discovery database, with an option to delete all discoveries simultaneously.
+ - The 'provision_workflow_manager' module provisions and re-provisions devices added in the inventory to site, by taking management IP address as input. It allows provisioning of both wired and wireless devices. It also allows un-provisioning of devices.
+ - The 'template_workflow_manager' module is responsible for overseeing templates, export projects/templates, and import projects/templates. It handles configuration templates by enabling the creation, updating, and deletion of templates and projects. Additionally, the module supports export functionality to retrieve project and template details from Cisco Catalyst Center, and Import functionality to create templates and projects within the Cisco Catalyst Center.
+ 6.12.0:
+ release_date: "2024-03-06"
+ changes:
+ release_summary: Enhancements in discovery and inventory workflow manager modules.
+ minor_changes:
+ - Changes in discovery workflow manager module to support SNMP credentials v2 and handling error messages.
+ - Changes in inventory workflow manager module to support snmp v2.
+ - swim_workflow_manager - attribute 'device_series_name' was added.
+ - swim_intent - attribute 'device_series_name' was added.
+ - discovery_workflow_manager - attribute 'global_credentials' was added and 'global_cli_len' was removed.
+ - discovery_intent - attribute 'global_credentials' was added and 'global_cli_len' was removed.
+ - >
+ inventory_workflow_manager - attributes 'serial_number', 'device_added', 'role_source' were removed.
+ attributes 'clear_mac_address_table', 'device_ip', 'resync_retry_count', 'resync_retry_interval',
+ 'reprovision_wired_device', 'provision_wireless_device' were added.
+ Renamed argument from 'ip_address' to 'ip_address_list'.
+ - >
+ inventory_intent - attributes 'serial_number', 'device_added', 'role_source' were removed.
+ attributes 'clear_mac_address_table', 'device_ip', 'resync_retry_count', 'resync_retry_interval',
+ 'reprovision_wired_device', 'provision_wireless_device' were added.
+ Renamed argument from 'ip_address' to 'ip_address_list'.
+ - pnp_workflow_manager - Adding fix for Stackswitch getting changed to normal switch post editing the device's info.
+ - pnp_intent - Adding fix for Stackswitch getting changed to normal switch post editing the device's info.
+ 6.13.0:
+ release_date: "2024-03-12"
+ changes:
+ release_summary: Changes the minimum supported version of Ansible to v2.14.0
+ minor_changes:
+ - Changes the minimum supported version from Ansible v2.9.10 to v2.14.0
+ 6.13.1:
+ release_date: "2024-03-15"
+ changes:
+ release_summary: Enhancements in discovery, site, swim and inventory workflow manager modules.
+ minor_changes:
+ - Introduced sample playbooks for the discovery module.
+ - Resolved a 'NoneType' error in discovery module credentials.
+ - Corrected site creation issues in the site module when optional parameters are missing.
+ - Fixed management IP updates for devices on SNMP version v2.
+ - Addressed image un-tagging issues in inherited site settings.
+ - Provided documentation for EWLC templates in Cisco Catalyst Center version 2.3.7.x.
+ - Added attributes 'dnac_api_task_timeout' and 'dnac_task_poll_interval' in intent and workflow_manager modules.
+ - inventory_workflow_manager - Added attributes 'add_user_defined_field', 'update_interface_details', 'export_device_list' and 'admin_status'
+ - inventory_workflow_manager - Removed attributes 'provision_wireless_device', 'reprovision_wired_device'
diff --git a/ansible_collections/cisco/dnac/docs/conf.py b/ansible_collections/cisco/dnac/docs/conf.py
index e7b2f9107..8122fe4d6 100644
--- a/ansible_collections/cisco/dnac/docs/conf.py
+++ b/ansible_collections/cisco/dnac/docs/conf.py
@@ -59,6 +59,9 @@ source_suffix = {
'.md': 'markdown',
}
+# Avoid substitution of smartquotes
+smartquotes = False
+
# The master toctree document.
master_doc = 'index'
diff --git a/ansible_collections/cisco/dnac/meta/runtime.yml b/ansible_collections/cisco/dnac/meta/runtime.yml
index 1f18fd726..bcb6dcedc 100644
--- a/ansible_collections/cisco/dnac/meta/runtime.yml
+++ b/ansible_collections/cisco/dnac/meta/runtime.yml
@@ -1,2 +1,2 @@
---
-requires_ansible: '>=2.9.10' \ No newline at end of file
+requires_ansible: '>=2.14.0' \ No newline at end of file
diff --git a/ansible_collections/cisco/dnac/playbooks/PnP.yml b/ansible_collections/cisco/dnac/playbooks/PnP.yml
new file mode 100644
index 000000000..63bad68e0
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/PnP.yml
@@ -0,0 +1,111 @@
+---
+- name: Manage operations - Add, claim, and delete devices of Onboarding Configuration (PnP)
+ hosts: localhost
+ connection: local
+ gather_facts: no
+
+ vars_files:
+ - "{{ CLUSTERFILE }}"
+
+ vars:
+ dnac_login: &dnac_login
+ dnac_host: "{{ dnac_host }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_version: "{{ dnac_version }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log_level: DEBUG
+
+ tasks:
+
+ - name: Import devices in bulk
+ cisco.dnac.pnp_intent:
+ <<: *dnac_login
+ dnac_log: True
+ state: merged
+ config_verify: True
+ config:
+ - device_info:
+ - serial_number: QD2425L8M7
+ state: Unclaimed
+ pid: c9300-24P
+ is_sudi_required: False
+
+ - serial_number: QTC2320E0H9
+ state: Unclaimed
+ pid: c9300-24P
+ hostname: Test-123
+
+ - serial_number: ETC2320E0HB
+ state: Unclaimed
+ pid: c9300-24P
+
+ - name: Add a new device and claim it
+ cisco.dnac.pnp_intent:
+ <<: *dnac_login
+ dnac_log: True
+ state: merged
+ config:
+ - site_name: Global/USA/San Francisco/BGL_18
+ device_info:
+ - serial_number: FJC2330E0BB
+ hostname: Test-9300-10
+ state: Unclaimed
+ pid: c9300-24P
+ is_sudi_required: True
+
+ - name: Claim a pre-added switch, apply a template, and perform an image upgrade for a specific site
+ cisco.dnac.pnp_intent:
+ <<: *dnac_login
+ dnac_log: True
+ state: merged
+ config:
+ - site_name: Global/USA/San Francisco/BGL_18
+ template_name: "Ansible_PNP_Switch"
+ image_name: cat9k_iosxe_npe.17.03.07.SPA.bin
+ project_name: Onboarding Configuration
+ template_details:
+ hostname: SJC-Switch-1
+ interface: TwoGigabitEthernet1/0/2
+ device_info:
+ - serial_number: FJC271924EQ
+ hostname: Switch
+ state: Unclaimed
+ pid: C9300-48UXM
+
+ - name: Claim an existing Wireless Controller, apply a template, and upgrade its image for a specified site
+ cisco.dnac.pnp_intent:
+ <<: *dnac_login
+ dnac_log: True
+ state: merged
+ config:
+ - site_name: Global/USA/San Francisco/BGL_18
+ pnp_type: CatalystWLC
+ template_name: "Ansible_PNP_WLC"
+ image_name: C9800-40-universalk9_wlc.17.12.01.SPA.bin
+ template_params:
+ hostname: IAC-EWLC-Claimed
+ device_info:
+ - serial_number: FOX2639PAY7
+ hostname: New_WLC
+ state: Unclaimed
+ pid: C9800-CL-K9
+ gateway: 204.192.101.1
+ ip_interface_name: TenGigabitEthernet0/0/0
+ static_ip: 204.192.101.10
+ subnet_mask: 255.255.255.0
+ vlan_id: 1101
+
+ - name: Remove multiple devices from the PnP dashboard safely (ignores non-existent devices)
+ cisco.dnac.pnp_intent:
+ <<: *dnac_login
+ dnac_log: True
+ state: deleted
+ config_verify: True
+ config:
+ - device_info:
+ - serial_number: QD2425L8M7 #Will get deleted
+ - serial_number: FTC2320E0HA #Doesn't exist in the inventory
+ - serial_number: FKC2310E0HB #Doesn't exist in the inventory \ No newline at end of file
diff --git a/ansible_collections/cisco/dnac/playbooks/PnP_Workflow_Manager_Playbook.yml b/ansible_collections/cisco/dnac/playbooks/PnP_Workflow_Manager_Playbook.yml
new file mode 100644
index 000000000..846ebf3a7
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/PnP_Workflow_Manager_Playbook.yml
@@ -0,0 +1,111 @@
+---
+- name: Manage operations - Add, claim, and delete devices of Onboarding Configuration (PnP)
+ hosts: localhost
+ connection: local
+ gather_facts: no
+
+ vars_files:
+ - "{{ CLUSTERFILE }}"
+
+ vars:
+ dnac_login: &dnac_login
+ dnac_host: "{{ dnac_host }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_version: "{{ dnac_version }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log_level: DEBUG
+
+ tasks:
+
+ - name: Import devices in bulk
+ cisco.dnac.pnp_workflow_manager:
+ <<: *dnac_login
+ dnac_log: True
+ state: merged
+ config_verify: True
+ config:
+ - device_info:
+ - serial_number: QD2425L8M7
+ state: Unclaimed
+ pid: c9300-24P
+ is_sudi_required: False
+
+ - serial_number: QTC2320E0H9
+ state: Unclaimed
+ pid: c9300-24P
+ hostname: Test-123
+
+ - serial_number: ETC2320E0HB
+ state: Unclaimed
+ pid: c9300-24P
+
+ - name: Add a new device and claim it
+ cisco.dnac.pnp_workflow_manager:
+ <<: *dnac_login
+ dnac_log: True
+ state: merged
+ config:
+ - site_name: Global/USA/San Francisco/BGL_18
+ device_info:
+ - serial_number: FJC2330E0BB
+ hostname: Test-9300-10
+ state: Unclaimed
+ pid: c9300-24P
+ is_sudi_required: True
+
+ - name: Claim a pre-added switch, apply a template, and perform an image upgrade for a specific site
+ cisco.dnac.pnp_workflow_manager:
+ <<: *dnac_login
+ dnac_log: True
+ state: merged
+ config:
+ - site_name: Global/USA/San Francisco/BGL_18
+ template_name: "Ansible_PNP_Switch"
+ image_name: cat9k_iosxe_npe.17.03.07.SPA.bin
+ project_name: Onboarding Configuration
+ template_details:
+ hostname: SJC-Switch-1
+ interface: TwoGigabitEthernet1/0/2
+ device_info:
+ - serial_number: FJC271924EQ
+ hostname: Switch
+ state: Unclaimed
+ pid: C9300-48UXM
+
+ - name: Claim an existing Wireless Controller, apply a template, and upgrade its image for a specified site
+ cisco.dnac.pnp_workflow_manager:
+ <<: *dnac_login
+ dnac_log: True
+ state: merged
+ config:
+ - site_name: Global/USA/San Francisco/BGL_18
+ pnp_type: CatalystWLC
+ template_name: "Ansible_PNP_WLC"
+ image_name: C9800-40-universalk9_wlc.17.12.01.SPA.bin
+ template_params:
+ hostname: IAC-EWLC-Claimed
+ device_info:
+ - serial_number: FOX2639PAY7
+ hostname: New_WLC
+ state: Unclaimed
+ pid: C9800-CL-K9
+ gateway: 204.192.101.1
+ ip_interface_name: TenGigabitEthernet0/0/0
+ static_ip: 204.192.101.10
+ subnet_mask: 255.255.255.0
+ vlan_id: 1101
+
+ - name: Remove multiple devices from the PnP dashboard safely (ignores non-existent devices)
+ cisco.dnac.pnp_workflow_manager:
+ <<: *dnac_login
+ dnac_log: True
+ state: deleted
+ config_verify: True
+ config:
+ - device_info:
+ - serial_number: QD2425L8M7 #Will get deleted
+ - serial_number: FTC2320E0HA #Doesn't exist in the inventory
+ - serial_number: FKC2310E0HB #Doesn't exist in the inventory
diff --git a/ansible_collections/cisco/dnac/playbooks/credentials.template b/ansible_collections/cisco/dnac/playbooks/credentials.template
index e621bdaa5..5270c0816 100644
--- a/ansible_collections/cisco/dnac/playbooks/credentials.template
+++ b/ansible_collections/cisco/dnac/playbooks/credentials.template
@@ -5,4 +5,7 @@ dnac_username: <username>
dnac_password: <password>
dnac_version: 2.3.5.3
dnac_verify: False
-dnac_debug: False \ No newline at end of file
+dnac_debug: False
+dnac_log_level: [CRITICAL, ERROR, WARNING, INFO, DEBUG]
+dnac_log_file_path: <file_path>
+dnac_log_append: True
diff --git a/ansible_collections/cisco/dnac/playbooks/credentials.yml b/ansible_collections/cisco/dnac/playbooks/credentials.yml
index 5413d0e8f..133e03cb9 100644
--- a/ansible_collections/cisco/dnac/playbooks/credentials.yml
+++ b/ansible_collections/cisco/dnac/playbooks/credentials.yml
@@ -1,8 +1,8 @@
---
-dnac_host: 192.168.196.2
+dnac_host: 100.119.103.190
dnac_port: 443
-dnac_username: admin
-dnac_password: Maglev123
-dnac_version: 2.2.2.3
+dnac_username: cloverhound_user
+dnac_password: LABchsys!23$
+dnac_version: 2.3.5.3
dnac_verify: False
-dnac_debug: True
+dnac_debug: True \ No newline at end of file
diff --git a/ansible_collections/cisco/dnac/playbooks/device_credential_intent.yml b/ansible_collections/cisco/dnac/playbooks/device_credential_intent.yml
new file mode 100644
index 000000000..bd5834ffe
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/device_credential_intent.yml
@@ -0,0 +1,119 @@
+- hosts: dnac_servers
+ vars_files:
+ - credentials.yml
+ gather_facts: no
+ connection: local
+ tasks:
+#
+# Project Info Section
+#
+
+ - name: Create Credentials and assign it to a site.
+ cisco.dnac.device_credential_intent:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ state: merged
+ config:
+ - global_credential_details:
+ cli_credential:
+ - description: CLI1
+ username: cli1
+ password: '12345'
+ enable_password: '12345'
+ # old_description:
+ # old_username:
+          # id: e448ea13-4de0-406b-bc6e-f72b57ed6746 # Use this for update or deletion
+ snmp_v2c_read:
+ - description: SNMPv2c Read1 # use this for deletion
+ read_community: '123456'
+ # old_description: # use this for updating the description
+          # id: 0ee7d677-8804-43f2-8b6c-599c5f18348f # Use this for update or deletion
+ snmp_v2c_write:
+ - description: SNMPv2c Write1 # use this for deletion
+ write_community: '123456'
+ # old_description: # use this for updating the description
+          # id: a96abc1b-1fd6-41f1-8a6d-a5569c17262d # Use this for update or deletion
+ snmp_v3:
+          - auth_password: '12345678' # At least 8 characters
+            auth_type: SHA # [SHA, MD5] (SHA is recommended)
+            snmp_mode: AUTHPRIV # [AUTHPRIV, AUTHNOPRIV, NOAUTHNOPRIV]
+            privacy_password: '12345678' # At least 8 characters
+            privacy_type: AES128 # [AES128, AES192, AES256]
+ username: snmpV31
+ description: snmpV31
+ # old_description:
+          # id: d8974823-250a-41b0-8c9b-b27b2ae01472 # Use this for update or deletion
+ https_read:
+ - description: HTTP Read1
+ username: HTTP_Read1
+ password: '12345'
+ port: 443
+ # old_description:
+ # old_username:
+          # id: a7ef9995-e404-4240-94ca-b5f37f65c19d # Use this for update or deletion
+ https_write:
+ - description: HTTP Write1
+ username: HTTP_Write1
+ password: '12345'
+ port: 443
+ # old_description:
+ # old_username:
+          # id: bec9818e-30cd-468b-bf75-292beefc2e20 # Use this for update or deletion
+ assign_credentials_to_site:
+ cli_credential:
+ # description: CLI
+ # username: cli
+ id: 2fc5f7d4-cf15-4a4f-99b3-f086e8dd6350
+ snmp_v2c_read:
+ # description: SNMPv2c Read
+ id: a966a4e5-9d11-4683-8edc-a5ad8fa59ee3
+ snmp_v2c_write:
+ # description: SNMPv2c Write
+ id: 7cd072a4-2263-4087-b6ec-93b20958e286
+ snmp_v3:
+ # description: snmpV3
+ id: c08a1797-84ce-4add-94a3-b419b13621e4
+ https_read:
+ # description: HTTP Read
+ # username: HTTP_Read
+ id: 1009725d-373b-4e7c-a091-300777e2bbe2
+ https_write:
+ # description: HTTP Write
+ # username: HTTP_Write
+ id: f1ab6e3d-01e9-4d87-8271-3ac5fde83980
+ site_name:
+ - Global/Chennai/Trill
+ - Global/Chennai/Tidel
+
+ - name: Delete Credentials
+ cisco.dnac.device_credential_intent:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ state: deleted
+ config:
+ - global_credential_details:
+ cli_credential:
+ - description: CLI1
+ username: cli1
+ snmp_v2c_read:
+ - description: SNMPv2c Read1 # use this for deletion
+ snmp_v2c_write:
+ - description: SNMPv2c Write1 # use this for deletion
+ snmp_v3:
+ - description: snmpV31
+ https_read:
+ - description: HTTP Read1
+ username: HTTP_Read1
+ https_write:
+ - description: HTTP Write1
+ username: HTTP_Write1
diff --git a/ansible_collections/cisco/dnac/playbooks/device_credential_workflow_manager.yml b/ansible_collections/cisco/dnac/playbooks/device_credential_workflow_manager.yml
new file mode 100644
index 000000000..3d77584f0
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/device_credential_workflow_manager.yml
@@ -0,0 +1,103 @@
+- hosts: dnac_servers
+ vars_files:
+ - credentials.yml
+ gather_facts: no
+ connection: local
+ tasks:
+ - name: Create Credentials and assign it to a site.
+ cisco.dnac.device_credential_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ state: merged
+ config:
+ - global_credential_details:
+ cli_credential:
+ - description: CLI1
+ username: cli1
+ password: '12345'
+ enable_password: '12345'
+ # old_description:
+ # old_username:
+ snmp_v2c_read:
+ - description: SNMPv2c Read1 # use this for deletion
+ read_community: '123456'
+ # old_description: # use this for updating the description
+ snmp_v2c_write:
+ - description: SNMPv2c Write1 # use this for deletion
+ write_community: '123456'
+ # old_description: # use this for updating the description
+ snmp_v3:
+          - auth_password: '12345678' # At least 8 characters
+            auth_type: SHA # [SHA, MD5] (SHA is recommended)
+            snmp_mode: AUTHPRIV # [AUTHPRIV, AUTHNOPRIV, NOAUTHNOPRIV]
+            privacy_password: '12345678' # At least 8 characters
+            privacy_type: AES128 # [AES128, AES192, AES256]
+ username: snmpV31
+ description: snmpV31
+ # old_description:
+ https_read:
+ - description: HTTP Read1
+ username: HTTP_Read1
+ password: '12345'
+ port: 443
+ # old_description:
+ # old_username:
+ https_write:
+ - description: HTTP Write1
+ username: HTTP_Write1
+ password: '12345'
+ port: 443
+ # old_description:
+ # old_username:
+ assign_credentials_to_site:
+ cli_credential:
+ description: CLI
+ username: cli
+ snmp_v2c_read:
+ description: SNMPv2c Read
+ snmp_v2c_write:
+ description: SNMPv2c Write
+ snmp_v3:
+ description: snmpV3
+ https_read:
+ description: HTTP Read
+ username: HTTP_Read
+ https_write:
+ description: HTTP Write
+ username: HTTP_Write
+ site_name:
+ - Global/Chennai/Trill
+ - Global/Chennai/Tidel
+
+ - name: Delete Credentials
+ cisco.dnac.device_credential_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ state: deleted
+ config:
+ - global_credential_details:
+ cli_credential:
+ - description: CLI1
+ username: cli1
+ snmp_v2c_read:
+ - description: SNMPv2c Read1 # use this for deletion
+ snmp_v2c_write:
+ - description: SNMPv2c Write1 # use this for deletion
+ snmp_v3:
+ - description: snmpV31
+ https_read:
+ - description: HTTP Read1
+ username: HTTP_Read1
+ https_write:
+ - description: HTTP Write1
+ username: HTTP_Write1
diff --git a/ansible_collections/cisco/dnac/playbooks/device_details.template b/ansible_collections/cisco/dnac/playbooks/device_details.template
new file mode 100644
index 000000000..38c95c627
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/device_details.template
@@ -0,0 +1,69 @@
+template_details:
+ - proj_name: 'Onboarding Configuration'
+ device_config: 'hostname cat9k-1\n'
+ language: 'velocity'
+ family: 'Switches and Hubs'
+ type: 'IOS-XE'
+ variant: 'XE'
+ temp_name: 'temp_cat9k-1'
+ description: 'Test Template'
+ export_project:
+ - 'Cloud DayN Templates'
+ export_template:
+ - project_name: 'Cloud DayN Templates'
+ template_name: 'DMVPN Spoke for Branch Router - System Default'
+ import_project:
+ do_version: false
+ payload:
+ - name: 'Onboarding Configuration2'
+ import_template:
+ do_version: false
+ project_name: 'Onboarding Configuration'
+ payload:
+ - name: 'Platinum-Onboarding-Template-J21'
+ device_types:
+ - product_family: 'Switches and Hubs'
+ productSeries: 'Cisco Catalyst 9300 Series Switches'
+ software_type: 'IOS'
+ language: 'JINJA'
+ - name: 'Platinum-Onboarding-Template-J22'
+ device_types:
+ - product_family: 'Switches and Hubs'
+ productSeries: 'Cisco Catalyst 9300 Series Switches'
+ software_type: 'IOS'
+ language: 'JINJA'
+ - name: 'Platinum-Onboarding-Template-J23'
+ device_types:
+ - product_family: 'Switches and Hubs'
+ productSeries: 'Cisco Catalyst 9300 Series Switches'
+ software_type: 'IOS'
+ language: 'JINJA'
+
+device_details:
+ - site_name: 'Global/Chennai/Trill'
+ image_name: 'cat9k_iosxe.17.04.01.SPA.bin'
+ proj_name: 'Onboarding Configuration'
+ temp_name: 'temp_cat9k-1'
+ device_version: '2'
+ device_number: 'AB2425L8M7'
+ device_name: 'Cat9k-1'
+ device_state: 'Unclaimed'
+ device_id: 'C9300-48UXM'
+ - site_name: 'Global/Chennai/Trill'
+    image_name: 'cat9k_iosxe.17.04.01.SPA.bin'
+ proj_name: 'Onboarding Configuration'
+ temp_name: 'temp_cat9k-2'
+ device_version: '2'
+ device_number: 'CD2425L8M7'
+ device_name: 'Cat9k-2'
+ device_state: 'Unclaimed'
+ device_id: 'C9300-48UXM'
+ - site_name: 'Global/Chennai/Trill'
+ image_name: 'cat9k_iosxe.17.04.01.SPA.bin'
+ proj_name: 'Onboarding Configuration'
+ temp_name: 'temp_cat9k-3'
+ device_version: '2'
+ device_number: 'EF2425L8M7'
+ device_name: 'Cat9k-3'
+ device_state: 'Unclaimed'
+ device_id: 'C9300-48UXM'
diff --git a/ansible_collections/cisco/dnac/playbooks/device_details.yml b/ansible_collections/cisco/dnac/playbooks/device_details.yml
deleted file mode 100644
index 0c9dfdfef..000000000
--- a/ansible_collections/cisco/dnac/playbooks/device_details.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-template_details:
- - proj_name: "Onboarding Configuration"
- device_config: "hostname cat9k-1\n"
- language: "velocity"
- family: "Switches and Hubs"
- type: "IOS-XE"
- variant: "XE"
- temp_name: "temp_cat9k-1"
- description: "Test Template 1"
- - proj_name: "Onboarding Configuration"
- device_config: "hostname cat9k-2\n"
- language: "velocity"
- family: "Switches and Hubs"
- type: "IOS-XE"
- variant: "XE"
- temp_name: "temp_cat9k-2"
- description: "Test Template 2"
- - proj_name: "Onboarding Configuration"
- device_config: "hostname cat9k-3\n"
- language: "velocity"
- family: "Switches and Hubs"
- type: "IOS-XE"
- variant: "XE"
- temp_name: "temp_cat9k-3"
- description: "Test Template 3"
-
-device_details:
- - site_name: "Global/Chennai/Trill"
- image_name: "cat9k_iosxe.17.04.01.SPA.bin"
- proj_name: "Onboarding Configuration"
- temp_name: "temp_cat9k-1"
- device_version: "2"
- device_number: "AB2425L8M7"
- device_name: "Cat9k-1"
- device_state: "Unclaimed"
- device_id: "C9300-25UX"
- - site_name: "Global/Chennai/Trill"
- image_name: "cat9k_iosxe.17.04.01.SPA.bin"
- proj_name: "Onboarding Configuration"
- temp_name: "temp_cat9k-2"
- device_version: "2"
- device_number: "CD2425L8M7"
- device_name: "Cat9k-2"
- device_state: "Unclaimed"
- device_id: "C9300-25UX"
- - site_name: "Global/Chennai/Trill"
- image_name: "cat9k_iosxe.17.04.01.SPA.bin"
- proj_name: "Onboarding Configuration"
- temp_name: "temp_cat9k-3"
- device_version: "2"
- device_number: "EF2425L8M7"
- device_name: "Cat9k-3"
- device_state: "Unclaimed"
- device_id: "C9300-25UX"
diff --git a/ansible_collections/cisco/dnac/playbooks/device_provision.yml b/ansible_collections/cisco/dnac/playbooks/device_provision.yml
new file mode 100644
index 000000000..fe3efe919
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/device_provision.yml
@@ -0,0 +1,37 @@
+---
+- name: Provision and Re-provision wired and wireless devices
+ hosts: localhost
+ connection: local
+ gather_facts: no
+
+ vars_files:
+ - "{{ CLUSTERFILE }}"
+
+ vars:
+ dnac_login: &dnac_login
+ dnac_host: "{{ dnac_host }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_version: "{{ dnac_version }}"
+ dnac_debug: "{{ dnac_debug }}"
+
+ tasks:
+ - name: Provision a wired device to a site
+ cisco.dnac.provision_intent:
+ <<: *dnac_login
+ dnac_log: True
+ state: merged
+ config:
+ - site_name: Global/USA/San Francisco/BGL_18
+ management_ip_address: 204.1.2.2
+
+
+ - name: Unprovision a wired device to a site
+ cisco.dnac.provision_intent:
+ <<: *dnac_login
+ dnac_log: True
+ state: deleted
+ config:
+ - management_ip_address: 204.1.2.2
diff --git a/ansible_collections/cisco/dnac/playbooks/device_provision_workflow.yml b/ansible_collections/cisco/dnac/playbooks/device_provision_workflow.yml
new file mode 100644
index 000000000..362556a09
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/device_provision_workflow.yml
@@ -0,0 +1,38 @@
+---
+- name: Provision and Re-provision wired and wireless devices
+ hosts: localhost
+ connection: local
+ gather_facts: no
+
+ vars_files:
+ - "{{ CLUSTERFILE }}"
+
+ vars:
+ dnac_login: &dnac_login
+ dnac_host: "{{ dnac_host }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_version: "{{ dnac_version }}"
+ dnac_debug: "{{ dnac_debug }}"
+
+ tasks:
+ - name: Provision a wired device to a site
+ cisco.dnac.workflow_manager:
+ <<: *dnac_login
+ dnac_log: True
+ state: merged
+ config_verify: True
+ config:
+ - site_name_hierarchy: Global/USA/San Francisco/BGL_18
+ management_ip_address: 204.1.1.1
+
+
+ - name: Unprovision a wired device from a site
+ cisco.dnac.workflow_manager:
+ <<: *dnac_login
+ dnac_log: True
+ state: deleted
+ config:
+ - management_ip_address: 204.1.1.1
diff --git a/ansible_collections/cisco/dnac/playbooks/discovery.yml b/ansible_collections/cisco/dnac/playbooks/discovery.yml
index d05d201a3..ce9a38770 100644
--- a/ansible_collections/cisco/dnac/playbooks/discovery.yml
+++ b/ansible_collections/cisco/dnac/playbooks/discovery.yml
@@ -1,5 +1,9 @@
---
- hosts: dnac_servers
+ vars:
+ globalCredentialIdList:
+ - "90acbab8-03d5-4726-9c19-e1e51a40b3cd"
+ - "f979d842-f6fd-456a-8137-2cb5113cd2e8"
vars_files:
- credentials.yml
gather_facts: false
@@ -17,7 +21,7 @@
- name: Set fact of filtered discoveries
ansible.builtin.set_fact:
- discoveries_filtered: "{{ discovery_range_result.dnac_response.response | selectattr('name', 'equalto', 'start_discovery_test2')}}"
+ discoveries_filtered: "{{ discovery_range_result.dnac_response.response | selectattr('name', 'equalto', 'DMZ Lab 2')}}"
when:
- discovery_range_result is defined
- discovery_range_result.dnac_response is defined
@@ -54,9 +58,7 @@
state: present
cdpLevel: 16
discoveryType: CDP
- globalCredentialIdList:
- - 90acbab8-03d5-4726-9c19-e1e51a40b3cd
- - f979d842-f6fd-456a-8137-2cb5113cd2e8
+ globalCredentialIdList: "{{globalCredentialIdList}}"
ipAddressList: 10.10.22.22
name: start_discovery_test2
protocolOrder: ssh
@@ -93,9 +95,7 @@
state: present
cdpLevel: 16
discoveryType: CDP
- globalCredentialIdList:
- - 90acbab8-03d5-4726-9c19-e1e51a40b3cd,
- - f979d842-f6fd-456a-8137-2cb5113cd2e8,
+ globalCredentialIdList: "{{globalCredentialIdList}}"
ipAddressList: 10.10.22.22
name: start_discovery_test2
protocolOrder: ssh
diff --git a/ansible_collections/cisco/dnac/playbooks/discovery_intent.yml b/ansible_collections/cisco/dnac/playbooks/discovery_intent.yml
new file mode 100644
index 000000000..9f004802f
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/discovery_intent.yml
@@ -0,0 +1,197 @@
+---
+- name: Discover devices using multiple discovery specific credentials and delete all the discoveries
+ hosts: localhost
+ connection: local
+ gather_facts: no
+
+ vars_files:
+ - "{{ CLUSTERFILE }}"
+
+ vars:
+ dnac_login: &dnac_login
+ dnac_host: "{{ dnac_host }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_version: "{{ dnac_version }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: DEBUG
+
+ tasks:
+
+ - name: Execute discovery devices using MULTI RANGE with various global credentials
+ cisco.dnac.discovery_intent:
+ <<: *dnac_login
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: Multi_global
+ discovery_type: MULTI RANGE
+ ip_address_list:
+ - 204.1.2.1-204.1.2.5
+ - 204.192.3.40
+ - 204.192.4.200
+ - 204.1.2.6
+ - 204.1.2.7
+ - 204.1.2.8
+ - 204.1.2.9
+ - 204.1.2.10
+ - 204.1.2.11
+ global_credentials:
+ cli_credentials_list:
+ - description: ISE
+ username: cisco
+ - description: CLI1234 #Incorrect name passed
+ username: cli
+ http_read_credential_list:
+ - description: HTTP Read
+ username: HTTP_Read
+ snmp_v3_credential_list:
+ - description: snmpV3
+ username: snmpV3
+ protocol_order: ssh
+
+ - name: Execute discovery of single device using various discovery specific credentials and all the global credentials
+ cisco.dnac.discovery_intent:
+ <<: *dnac_login
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: Single IP Discovery
+ discovery_type: "SINGLE"
+ ip_address_list:
+ - 204.1.2.5
+ discovery_specific_credentials:
+ cli_credentials_list:
+ - username: cisco
+ password: Cisco#123
+ enable_password: Cisco#123
+ http_read_credential:
+ username: string
+ password: Lablab#123
+ port: 443
+ secure: True
+ snmp_v2_read_credential:
+ desc: string
+ community: string
+ snmp_v2_write_credential:
+ desc: string
+ community: string
+ snmp_v3_credential:
+ username: v3Public2
+ snmp_mode: AUTHPRIV
+ auth_type: SHA
+ auth_password: Lablab#1234
+ privacy_type: AES256
+ privacy_password: Lablab#1234
+ protocol_order: ssh
+
+ - name: Execute discovery of single device using various discovery specific credentials only
+ cisco.dnac.discovery_intent:
+ <<: *dnac_login
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: Single without Global Credentials
+ discovery_type: "SINGLE"
+ ip_address_list:
+ - 204.1.2.5
+ use_global_credentials: False
+ discovery_specific_credentials:
+ cli_credentials_list:
+ - username: cisco
+ password: Cisco#123
+ enable_password: Cisco#123
+ http_read_credential:
+ username: string
+ password: Lablab#123
+ port: 443
+ secure: True
+ snmp_v2_read_credential:
+ desc: string
+ community: string
+ snmp_v2_write_credential:
+ desc: string
+ community: string
+ snmp_v3_credential:
+ username: v3Public2
+ snmp_mode: AUTHPRIV
+ auth_type: SHA
+ auth_password: Lablab#1234
+ privacy_type: AES256
+ privacy_password: Lablab#1234
+ protocol_order: ssh
+
+ - name: Execute discovery devices using MULTI RANGE with various discovery specific credentials and all global credentials (max 5 allowed)
+ cisco.dnac.discovery_intent:
+ <<: *dnac_login
+ state: merged
+ config_verify: True
+ config:
+ - discovery_type: "MULTI RANGE"
+ discovery_name: Multi_range
+ ip_address_list:
+ - 204.1.2.1-204.1.2.100 #It will be taken as 204.1.2.1 - 204.1.2.1
+ - 205.2.1.1-205.2.1.10
+ ip_filter_list:
+          - 204.1.2.5 #Device with IP 204.1.2.5 won't be discovered
+ discovery_specific_credentials:
+ cli_credentials_list:
+ - username: admin
+ password: maglev123
+ enable_password: maglev123
+ http_read_credential:
+ username: admin
+ password: maglev123
+ port: 443
+ secure: True
+ snmp_v2_read_credential:
+ desc: new
+ community: password
+ snmp_v3_credential:
+ username: administrator
+ snmp_mode: AUTHPRIV
+ auth_password: admin123
+ auth_type: SHA
+ privacy_type: AES192
+ privacy_password: cisco#123
+ protocol_order: ssh
+ start_index: 1
+ records_to_return: 1000
+ snmp_version: v2
+
+ - name: Execute discovery devices using CDP/LLDP/CIDR leveraging discovery specific credentials and all the global credentials
+ cisco.dnac.discovery_intent:
+ <<: *dnac_login
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: CDP_Test_1
+ discovery_type: "CDP" #Can be LLDP and CIDR
+ ip_address_list: #List length should be one
+ - 204.1.2.1
+ cdp_level: 2 #Instead use lldp_level for LLDP and prefix length for CIDR
+ discovery_specific_credentials:
+ cli_credentials_list:
+ - username: admin
+ password: maglev123
+ enable_password: maglev123
+ protocol_order: ssh
+
+ - name: Execute deletion of single discovery from the dashboard
+ cisco.dnac.discovery_intent:
+ <<: *dnac_login
+ state: deleted
+ config_verify: True
+ config:
+ - discovery_name: CDP_Test_1
+
+ - name: Execute deletion of all the discoveries from the dashboard
+ cisco.dnac.discovery_intent:
+ <<: *dnac_login
+ state: deleted
+ config_verify: True
+ config:
+ - delete_all: True \ No newline at end of file
diff --git a/ansible_collections/cisco/dnac/playbooks/discovery_workflow_manager.yml b/ansible_collections/cisco/dnac/playbooks/discovery_workflow_manager.yml
new file mode 100644
index 000000000..6a7e6cf7d
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/discovery_workflow_manager.yml
@@ -0,0 +1,197 @@
+---
+- name: Discover devices using multiple discovery specific credentials and delete all the discoveries
+ hosts: localhost
+ connection: local
+ gather_facts: no
+
+ vars_files:
+ - "{{ CLUSTERFILE }}"
+
+ vars:
+ dnac_login: &dnac_login
+ dnac_host: "{{ dnac_host }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_version: "{{ dnac_version }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: DEBUG
+
+ tasks:
+
+ - name: Execute discovery devices using MULTI RANGE with various global credentials
+ cisco.dnac.discovery_workflow_manager:
+ <<: *dnac_login
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: Multi_global
+ discovery_type: MULTI RANGE
+ ip_address_list:
+ - 204.1.2.1-204.1.2.5
+ - 204.192.3.40
+ - 204.192.4.200
+ - 204.1.2.6
+ - 204.1.2.7
+ - 204.1.2.8
+ - 204.1.2.9
+ - 204.1.2.10
+ - 204.1.2.11
+ global_credentials:
+ cli_credentials_list:
+ - description: ISE
+ username: cisco
+ - description: CLI1234 #Incorrect name passed
+ username: cli
+ http_read_credential_list:
+ - description: HTTP Read
+ username: HTTP_Read
+ snmp_v3_credential_list:
+ - description: snmpV3
+ username: snmpV3
+ protocol_order: ssh
+
+ - name: Execute discovery of single device using various discovery specific credentials and all the global credentials
+ cisco.dnac.discovery_workflow_manager:
+ <<: *dnac_login
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: Single IP Discovery
+ discovery_type: "SINGLE"
+ ip_address_list:
+ - 204.1.2.5
+ discovery_specific_credentials:
+ cli_credentials_list:
+ - username: cisco
+ password: Cisco#123
+ enable_password: Cisco#123
+ http_read_credential:
+ username: string
+ password: Lablab#123
+ port: 443
+ secure: True
+ snmp_v2_read_credential:
+ desc: string
+ community: string
+ snmp_v2_write_credential:
+ desc: string
+ community: string
+ snmp_v3_credential:
+ username: v3Public2
+ snmp_mode: AUTHPRIV
+ auth_type: SHA
+ auth_password: Lablab#1234
+ privacy_type: AES256
+ privacy_password: Lablab#1234
+ protocol_order: ssh
+
+ - name: Execute discovery of single device using various discovery specific credentials only
+ cisco.dnac.discovery_workflow_manager:
+ <<: *dnac_login
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: Single without Global Credentials
+ discovery_type: "SINGLE"
+ ip_address_list:
+ - 204.1.2.5
+ use_global_credentials: False
+ discovery_specific_credentials:
+ cli_credentials_list:
+ - username: cisco
+ password: Cisco#123
+ enable_password: Cisco#123
+ http_read_credential:
+ username: string
+ password: Lablab#123
+ port: 443
+ secure: True
+ snmp_v2_read_credential:
+ desc: string
+ community: string
+ snmp_v2_write_credential:
+ desc: string
+ community: string
+ snmp_v3_credential:
+ username: v3Public2
+ snmp_mode: AUTHPRIV
+ auth_type: SHA
+ auth_password: Lablab#1234
+ privacy_type: AES256
+ privacy_password: Lablab#1234
+ protocol_order: ssh
+
+ - name: Execute discovery devices using MULTI RANGE with various discovery specific credentials and all global credentials (max 5 allowed)
+ cisco.dnac.discovery_workflow_manager:
+ <<: *dnac_login
+ state: merged
+ config_verify: True
+ config:
+ - discovery_type: "MULTI RANGE"
+ discovery_name: Multi_range
+ ip_address_list:
+ - 204.1.2.1-204.1.2.100 #It will be taken as 204.1.2.1 - 204.1.2.1
+ - 205.2.1.1-205.2.1.10
+ ip_filter_list:
+          - 204.1.2.5 #Device with IP 204.1.2.5 won't be discovered
+ discovery_specific_credentials:
+ cli_credentials_list:
+ - username: admin
+ password: maglev123
+ enable_password: maglev123
+ http_read_credential:
+ username: admin
+ password: maglev123
+ port: 443
+ secure: True
+ snmp_v2_read_credential:
+ desc: new
+ community: password
+ snmp_v3_credential:
+ username: administrator
+ snmp_mode: AUTHPRIV
+ auth_password: admin123
+ auth_type: SHA
+ privacy_type: AES192
+ privacy_password: cisco#123
+ protocol_order: ssh
+ start_index: 1
+ records_to_return: 1000
+ snmp_version: v2
+
+ - name: Execute discovery devices using CDP/LLDP/CIDR leveraging discovery specific credentials and all the global credentials
+ cisco.dnac.discovery_workflow_manager:
+ <<: *dnac_login
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: CDP_Test_1
+ discovery_type: "CDP" #Can be LLDP and CIDR
+ ip_address_list: #List length should be one
+ - 204.1.2.1
+ cdp_level: 2 #Instead use lldp_level for LLDP and prefix length for CIDR
+ discovery_specific_credentials:
+ cli_credentials_list:
+ - username: admin
+ password: maglev123
+ enable_password: maglev123
+ protocol_order: ssh
+
+ - name: Execute deletion of single discovery from the dashboard
+ cisco.dnac.discovery_workflow_manager:
+ <<: *dnac_login
+ state: deleted
+ config_verify: True
+ config:
+ - discovery_name: CDP_Test_1
+
+ - name: Execute deletion of all the discoveries from the dashboard
+ cisco.dnac.discovery_workflow_manager:
+ <<: *dnac_login
+ state: deleted
+ config_verify: True
+ config:
+ - delete_all: True \ No newline at end of file
diff --git a/ansible_collections/cisco/dnac/playbooks/global_credentials.yml b/ansible_collections/cisco/dnac/playbooks/global_credentials.yml
index c485e5d20..dca7df37c 100644
--- a/ansible_collections/cisco/dnac/playbooks/global_credentials.yml
+++ b/ansible_collections/cisco/dnac/playbooks/global_credentials.yml
@@ -17,8 +17,8 @@
validate_response_schema: false
enablePassword: C2sco456!
username: Test2
- password: C2sco456!
- description: Test2
+ password: C2sco456!2
+ description: Test2 Ansible
- name: Create SNMPV3 Credential
cisco.dnac.snmpv3_credential:
@@ -33,4 +33,4 @@
validate_response_schema: false
username: user2
snmpMode: NOAUTHNOPRIV
- description: user2 cred
+ description: user2 222
diff --git a/ansible_collections/cisco/dnac/playbooks/inventory_device.yml b/ansible_collections/cisco/dnac/playbooks/inventory_device.yml
new file mode 100644
index 000000000..e17bf6791
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/inventory_device.yml
@@ -0,0 +1,53 @@
+---
+- name: Configure device credentials on Cisco DNA Center
+ hosts: localhost
+ connection: local
+ gather_facts: no
+ vars_files:
+ - "input_inventory.yml"
+ - "credentials.yml"
+ tasks:
+ - name: Add/Update/Resync/Delete the devices in Cisco DNA Center Inventory.
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: true
+ state: merged
+ config:
+ - username: "{{item.username}}"
+ enable_password: "{{item.enable_password}}"
+ password: "{{item.password}}"
+ ip_address: "{{item.ip_address}}"
+ snmp_auth_passphrase: "{{item.snmp_auth_passphrase}}"
+ snmp_auth_protocol: "{{item.snmp_auth_protocol}}"
+ snmp_mode: "{{item.snmp_mode}}"
+ snmp_priv_passphrase: "{{item.snmp_priv_passphrase}}"
+ snmp_priv_protocol: "{{item.snmp_priv_protocol}}"
+ snmp_ro_community: "{{item.snmp_ro_community}}"
+ snmp_rw_community: "{{item.snmp_rw_community}}"
+ snmp_username: "{{item.snmp_username}}"
+ device_updated: "{{item.device_updated}}"
+ clean_config: "{{item.clean_config}}"
+ type: "{{item.type}}"
+ update_device_role:
+ role: "{{item.role}}"
+ role_source: "{{item.role_source}}"
+ add_user_defined_field:
+ name: "{{item.name}}"
+ description: "{{item.description}}"
+ value: "{{item.value}}"
+ provision_wired_device:
+ site_name: "{{item.site_name}}"
+ export_device_list:
+ password: "{{item.export_device_list.password}}"
+ operation_enum: "{{item.export_device_list.operation_enum}}"
+ reboot_device: "{{item.reboot_device}}"
+
+ with_items: "{{ device_details }}"
+ tags:
+ - inventory_device \ No newline at end of file
diff --git a/ansible_collections/cisco/dnac/playbooks/inventory_workflow_manager.yml b/ansible_collections/cisco/dnac/playbooks/inventory_workflow_manager.yml
new file mode 100644
index 000000000..72bcc39d0
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/inventory_workflow_manager.yml
@@ -0,0 +1,67 @@
+---
+- name: Configure device credentials on Cisco DNA Center
+ hosts: localhost
+ connection: local
+ gather_facts: no
+ vars_files:
+ - "input_inventory.yml"
+ - "credentials.yml"
+ tasks:
+ - name: Add/Update/Resync/Delete the devices in Cisco DNA Center Inventory.
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: DEBUG
+ dnac_log: true
+ config_verify: true
+ state: merged
+ config:
+ - username: "{{item.username}}"
+ password: "{{item.password}}"
+ enable_password: "{{item.enable_password}}"
+ ip_address_list: "{{item.ip_address_list}}"
+ cli_transport: "{{item.cli_transport}}"
+ snmp_auth_passphrase: "{{item.snmp_auth_passphrase}}"
+ snmp_auth_protocol: "{{item.snmp_auth_protocol}}"
+ snmp_mode: "{{item.snmp_mode}}"
+ snmp_priv_passphrase: "{{item.snmp_priv_passphrase}}"
+ snmp_priv_protocol: "{{item.snmp_priv_protocol}}"
+ snmp_ro_community: "{{item.snmp_ro_community}}"
+ snmp_rw_community: "{{item.snmp_rw_community}}"
+ snmp_username: "{{item.snmp_username}}"
+ credential_update: "{{item.credential_update}}"
+ clean_config: "{{item.clean_config}}"
+ type: "{{item.type}}"
+ device_resync: "{{item.device_resync}}"
+ reboot_device: "{{item.reboot_device}}"
+ role: "{{item.role}}"
+ add_user_defined_field:
+ - name: Test123
+ description: "Added first udf for testing"
+ value: "value123"
+ - name: Test321
+ description: "Added second udf for testing"
+ value: "value321"
+ provision_wired_device:
+ - device_ip: "1.1.1.1"
+ site_name: "Global/USA/San Francisco/BGL_18/floor_pnp"
+ resync_retry_count: 200
+ resync_interval: 2
+ - device_ip: "2.2.2.2"
+ site_name: "Global/USA/San Francisco/BGL_18/floor_test"
+ resync_retry_count: 200
+ resync_retry_interval: 2
+ update_interface_details:
+ description: "{{item.update_interface_details.description}}"
+ interface_name: "{{item.interface_name}}"
+ export_device_list:
+ password: "{{item.export_device_list.password}}"
+
+ with_items: "{{ device_details }}"
+ tags:
+ - inventory_device
diff --git a/ansible_collections/cisco/dnac/playbooks/network_settings_intent.yml b/ansible_collections/cisco/dnac/playbooks/network_settings_intent.yml
new file mode 100644
index 000000000..92d045a4d
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/network_settings_intent.yml
@@ -0,0 +1,112 @@
+- hosts: dnac_servers
+ vars_files:
+ - credentials.yml
+ gather_facts: no
+ connection: local
+ tasks:
+#
+# Project Info Section
+#
+
+ - name: Create global pool, reserve subpool and network functions
+ cisco.dnac.network_settings_intent:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ dnac_log_append: True
+ dnac_log_file_path: "{{ dnac_log_file_path }}"
+ state: merged
+ config_verify: True
+ config:
+ - global_pool_details:
+ settings:
+ ip_pool:
+ - name: Global_Pool2
+ gateway: '' #use this for updating
+ ip_address_space: IPv6 #required when we are creating
+ cidr: 2001:db8::/64 #required when we are creating
+ pool_type: Generic
+ dhcp_server_ips: [] #use this for updating
+ dns_server_ips: [] #use this for updating
+ # prev_name: Global_Pool2
+ reserve_pool_details:
+ ipv6_address_space: True
+ ipv4_global_pool: 100.0.0.0/8
+ ipv4_prefix: True
+ ipv4_prefix_length: 9
+ ipv4_subnet: 100.128.0.0
+ ipv4_gateway: 100.128.0.1
+ # ipv4_dns_servers: [100.128.0.1]
+ name: IP_Pool_3
+ ipv6_prefix: True
+ ipv6_prefix_length: 64
+ ipv6_global_pool: 2001:db8::/64
+ ipv6_subnet: '2001:db8::'
+ site_name: Global/Chennai/Trill
+ slaac_support: True
+ # prev_name: IP_Pool_4
+ type: LAN
+ network_management_details:
+ settings:
+ dhcp_server:
+ - 10.0.0.1
+ dns_server:
+ domain_name: cisco.com
+ primary_ip_address: 10.0.0.2
+ secondary_ip_address: 10.0.0.3
+          client_and_endpoint_aaa: #works only if the system settings are set
+ ip_address: 10.197.156.42 #Mandatory for ISE, sec ip for AAA
+ network: 10.0.0.20
+ protocol: RADIUS
+ servers: AAA
+ # shared_secret: string #ISE
+ message_of_the_day:
+ banner_message: hello
+ retain_existing_banner: 'true'
+ netflow_collector:
+ ip_address: 10.0.0.4
+ port: 443
+          network_aaa: #works only if the system settings are set
+ ip_address: 10.0.0.21 #Mandatory for ISE, sec ip for AAA
+ network: 10.0.0.20
+ protocol: TACACS
+ servers: AAA
+ # shared_secret: string #ISE
+ ntp_server:
+ - 10.0.0.5
+ snmp_server:
+ configure_dnac_ip: false
+ # ip_addresses:
+ # - 10.0.0.6
+ syslog_server:
+ configure_dnac_ip: false
+ # ip_addresses:
+ # - 10.0.0.7
+ timezone: GMT
+ site_name: Global/Chennai
+
+ - name: Delete Global Pool and Release Pool Reservation
+ cisco.dnac.network_settings_intent:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: deleted
+ config_verify: True
+ config:
+ - global_pool_details:
+ settings:
+ ip_pool:
+ - name: Global_Pool2
+ reserve_pool_details:
+ name: IP_Pool_3
+ site_name: Global/Chennai/Trill
diff --git a/ansible_collections/cisco/dnac/playbooks/network_settings_workflow_manager.yml b/ansible_collections/cisco/dnac/playbooks/network_settings_workflow_manager.yml
new file mode 100644
index 000000000..36b88ac2d
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/network_settings_workflow_manager.yml
@@ -0,0 +1,108 @@
+- hosts: dnac_servers
+ vars_files:
+ - credentials.yml
+ gather_facts: no
+ connection: local
+ tasks:
+ - name: Create global pool, reserve subpool and network functions
+ cisco.dnac.network_settings_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ dnac_log_append: True
+ dnac_log_file_path: "{{ dnac_log_file_path }}"
+ state: merged
+ config_verify: True
+ config:
+ - global_pool_details:
+ settings:
+ ip_pool:
+ - name: Global_Pool2
+ gateway: '' #use this for updating
+ ip_address_space: IPv6 #required when we are creating
+ cidr: 2001:db8::/64 #required when we are creating
+ pool_type: Generic
+ dhcp_server_ips: [] #use this for updating
+ dns_server_ips: [] #use this for updating
+ # prev_name: Global_Pool2
+ reserve_pool_details:
+ ipv6_address_space: True
+ ipv4_global_pool: 100.0.0.0/8
+ ipv4_prefix: True
+ ipv4_prefix_length: 9
+ ipv4_subnet: 100.128.0.0
+ ipv4_gateway: 100.128.0.1
+ # ipv4_dns_servers: [100.128.0.1]
+ name: IP_Pool_3
+ ipv6_prefix: True
+ ipv6_prefix_length: 64
+ ipv6_global_pool: 2001:db8::/64
+ ipv6_subnet: '2001:db8::'
+ site_name: Global/Chennai/Trill
+ slaac_support: True
+ # prev_name: IP_Pool_4
+ pool_type: LAN
+ network_management_details:
+ settings:
+ dhcp_server:
+ - 10.0.0.1
+ dns_server:
+ domain_name: cisco.com
+ primary_ip_address: 10.0.0.2
+ secondary_ip_address: 10.0.0.3
+          client_and_endpoint_aaa: #works only if the system settings are set
+ ip_address: 10.197.156.42 #Mandatory for ISE, sec ip for AAA
+ network: 10.0.0.20
+ protocol: RADIUS
+ servers: AAA
+ # shared_secret: string #ISE
+ message_of_the_day:
+ banner_message: hello
+ retain_existing_banner: 'true'
+ netflow_collector:
+ ip_address: 10.0.0.4
+ port: 443
+          network_aaa: #works only if the system settings are set
+ ip_address: 10.0.0.21 #Mandatory for ISE, sec ip for AAA
+ network: 10.0.0.20
+ protocol: TACACS
+ servers: AAA
+ # shared_secret: string #ISE
+ ntp_server:
+ - 10.0.0.5
+ snmp_server:
+ configure_dnac_ip: false
+ # ip_addresses:
+ # - 10.0.0.6
+ syslog_server:
+ configure_dnac_ip: false
+ # ip_addresses:
+ # - 10.0.0.7
+ timezone: GMT
+ site_name: Global/Chennai
+
+ - name: Delete Global Pool and Release Pool Reservation
+ cisco.dnac.network_settings_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: deleted
+ config_verify: True
+ config:
+ - global_pool_details:
+ settings:
+ ip_pool:
+ - name: Global_Pool2
+ reserve_pool_details:
+ name: IP_Pool_3
+ site_name: Global/Chennai/Trill
diff --git a/ansible_collections/cisco/dnac/playbooks/site_intent.yml b/ansible_collections/cisco/dnac/playbooks/site_intent.yml
new file mode 100644
index 000000000..9dfe26099
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/site_intent.yml
@@ -0,0 +1,32 @@
+- hosts: localhost
+ connection: local
+ gather_facts: no
+ vars_files:
+ - "credentials.yml"
+ tasks:
+ - name: Get site info and updating site details
+ cisco.dnac.site_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: true
+ state: merged
+ config:
+ - site:
+ floor:
+ name: Test_Floor2
+ parent_name: 'Global/USA/San Francisco/BGL_18'
+ length: "101"
+ width: "75"
+ height: "50"
+ rf_model: 'Cubes And Walled Offices'
+ floor_number: 3
+ - site:
+ area:
+ name: Canada
+ parent_name: 'Global'
+ type: area
diff --git a/ansible_collections/cisco/dnac/playbooks/site_workflow_manager.yml b/ansible_collections/cisco/dnac/playbooks/site_workflow_manager.yml
new file mode 100644
index 000000000..79b42a436
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/site_workflow_manager.yml
@@ -0,0 +1,39 @@
+- hosts: localhost
+ connection: local
+ gather_facts: no
+ vars_files:
+ - "credentials.yml"
+ tasks:
+ - name: Get site info and updating site details
+ cisco.dnac.site_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: DEBUG
+ config_verify: True
+ state: merged
+ config:
+ - site:
+ floor:
+ name: Test_Floor6
+ parent_name: 'Global/USA/San Francisco/BGL_18'
+ length: 103.23
+ width: 75.1
+ height: 50.22
+ rf_model: 'Cubes And Walled Offices'
+ floor_number: 3
+ site_type: floor
+ - site:
+ area:
+ name: Abc
+ parent_name: 'Global'
+ address: Bengaluru, Karnataka, India
+ latitude: 22.2111
+ longitude: -42.1234434
+ country: "United States"
+ site_type: area
diff --git a/ansible_collections/cisco/dnac/playbooks/swim_import_local.yml b/ansible_collections/cisco/dnac/playbooks/swim_import_local.yml
new file mode 100644
index 000000000..8f6c55994
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/swim_import_local.yml
@@ -0,0 +1,22 @@
+- hosts: dnac_servers
+ vars_files:
+ - credentials.yml
+ gather_facts: false
+ connection: local
+ tasks:
+ - name: Import Local
+ cisco.dnac.swim_import_local:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ #headers: "{{my_headers | from_json}}"
+ isThirdParty: False
+ thirdPartyVendor: "string"
+ thirdPartyImageFamily: "string"
+ thirdPartyApplicationType: "string"
+ filePath: "./tests/test-1592357065255.csv"
+ register: result \ No newline at end of file
diff --git a/ansible_collections/cisco/dnac/playbooks/swim_intent.yml b/ansible_collections/cisco/dnac/playbooks/swim_intent.yml
new file mode 100644
index 000000000..687a84e25
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/swim_intent.yml
@@ -0,0 +1,43 @@
+---
+- name: Import software images, tag them as golden, then distribute and activate them on Cisco DNA Center
+ hosts: localhost
+ connection: local
+ gather_facts: no
+ vars_files:
+ - "input_swim.yml"
+ - "credentials.yml"
+ tasks:
+ - name: Import an image, tag it as golden and load it on device
+ cisco.dnac.swim_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: true
+ config:
+ - import_image_details:
+ type: "{{ item.type }}"
+ url_details:
+ payload: "{{ item.url_details.payload }}"
+ tagging_details:
+ device_role: "{{ item.device_role }}"
+ device_family_name: "{{ item.device_family_name }}"
+ device_type: "{{item.device_type}}"
+ tagging: true
+ image_distribution_details:
+ site_name: "{{item.site_name}}"
+ device_role: "{{ item.device_role }}"
+ device_family_name: "{{ item.device_family_name }}"
+ image_activation_details:
+ site_name: "{{item.site_name}}"
+ device_role: "{{ item.device_role }}"
+ device_family_name: "{{ item.device_family_name }}"
+ scehdule_validate: false
+ distribute_if_needed: true
+
+ with_items: "{{ image_details }}"
+ tags:
+ - swim \ No newline at end of file
diff --git a/ansible_collections/cisco/dnac/playbooks/swim_workflow_manager.yml b/ansible_collections/cisco/dnac/playbooks/swim_workflow_manager.yml
new file mode 100644
index 000000000..c4f027c46
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/swim_workflow_manager.yml
@@ -0,0 +1,54 @@
+---
+- name: Import software images, tag them as golden, then distribute and activate them on Cisco DNA Center
+ hosts: localhost
+ connection: local
+ gather_facts: no
+ vars_files:
+ - "input_swim.yml"
+ - "credentials.yml"
+ tasks:
+ - name: Import an image, tag it as golden and load it on device
+ cisco.dnac.swim_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: DEBUG
+ config_verify: True
+ dnac_api_task_timeout: 1000
+ dnac_task_poll_interval: 1
+ config:
+ - import_image_details:
+ type: "{{ item.type }}"
+ url_details:
+ payload:
+ - source_url: http://172.21.236.183/swim/V1712_2_CCO/cat9k_iosxe.17.12.02.SPA.bin
+ third_party: False
+ tagging_details:
+ image_name: cat9k_iosxe.17.12.02.SPA.bin
+ device_role: ALL
+ device_image_family_name: Cisco Catalyst 9300 Switch
+ site_name: "{{item.site_name}}"
+ tagging: True
+ # image_distribution_details:
+ # image_name: cat9k_iosxe.17.12.02.SPA.bin
+ # site_name: "{{item.site_name}}"
+ # device_role: "{{ item.device_role }}"
+ # device_family_name: "{{ item.device_family_name }}"
+ # device_series_name: "Catalyst 9300 Series"
+ image_activation_details:
+ image_name: cat9k_iosxe.17.12.02.SPA.bin
+ site_name: "{{item.site_name}}"
+ device_role: "{{ item.device_role }}"
+ device_family_name: "{{ item.device_family_name }}"
+ device_series_name: "Catalyst 9300 Series"
+ scehdule_validate: False
+ distribute_if_needed: True
+
+ with_items: "{{ image_details }}"
+ tags:
+ - swim
diff --git a/ansible_collections/cisco/dnac/playbooks/template_pnp_intent.yml b/ansible_collections/cisco/dnac/playbooks/template_pnp_intent.yml
index 4ce25e16d..09ea6a722 100644
--- a/ansible_collections/cisco/dnac/playbooks/template_pnp_intent.yml
+++ b/ansible_collections/cisco/dnac/playbooks/template_pnp_intent.yml
@@ -1,14 +1,14 @@
-- hosts: dnac_servers
+- hosts: localhost
vars_files:
- credentials.yml
- - device_details.yml
+ - device_details.template
gather_facts: false
connection: local
tasks:
#
# Project Info Section
#
- - name: Test project template
+ - name: Test project template
cisco.dnac.template_intent:
dnac_host: "{{ dnac_host }}"
dnac_port: "{{ dnac_port }}"
@@ -18,17 +18,19 @@
dnac_debug: "{{ dnac_debug }}"
dnac_log: true
state: "merged"
+ config_verify: true
#ignore_errors: true #Enable this to continue execution even the task fails
config:
- - projectName: "{{ item.proj_name }}"
- templateContent: "{{ item.device_config }}"
- language: "{{ item.language }}"
- deviceTypes:
- - productFamily: "{{ item.family }}"
- softwareType: "{{ item.type }}"
- softwareVariant: "{{ item.variant }}"
- templateName: "{{ item.temp_name }}"
- versionDescription: "{{ item.description }}"
+ - configuration_templates:
+ project_name: "{{ item.proj_name }}"
+ template_name: "{{ item.temp_name }}"
+ template_content: "{{ item.device_config }}"
+ version_description: "{{ item.description }}"
+ language: "{{ item.language }}"
+ software_type: "{{ item.type }}"
+ software_variant: "{{ item.variant }}"
+ device_types:
+ - product_family: "{{ item.family }}"
register: template_result
with_items: '{{ template_details }}'
tags:
@@ -49,12 +51,11 @@
project_name: "{{ item.proj_name }}"
template_name: "{{ item.temp_name }}"
image_name: "{{ item.image_name }}"
- device_version: "{{ item.device_version }}"
- deviceInfo:
- serialNumber: "{{ item.device_number }}"
- hostname: "{{ item.device_name}}"
- state: "{{ item.device_state }}"
- pid: "{{ item.device_id }}"
+ device_info:
+ - serial_number: "{{ item.device_number }}"
+ hostname: "{{ item.device_name}}"
+ state: "{{ item.device_state }}"
+ pid: "{{ item.device_id }}"
register: pnp_result
with_items: '{{ device_details }}'
tags:
diff --git a/ansible_collections/cisco/dnac/playbooks/template_workflow_manager.yml b/ansible_collections/cisco/dnac/playbooks/template_workflow_manager.yml
new file mode 100644
index 000000000..25b0ec797
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/template_workflow_manager.yml
@@ -0,0 +1,34 @@
+- hosts: localhost
+ vars_files:
+ - credentials.yml
+ - device_details.template
+ gather_facts: false
+ connection: local
+ tasks:
+ - name: Test project template
+ cisco.dnac.template_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: true
+ state: "merged"
+ config_verify: true
+ #ignore_errors: true #Enable this to continue execution even the task fails
+ config:
+ - configuration_templates:
+ project_name: "{{ item.proj_name }}"
+ template_name: "{{ item.temp_name }}"
+ template_content: "{{ item.device_config }}"
+ version_description: "{{ item.description }}"
+ language: "{{ item.language }}"
+ software_type: "{{ item.type }}"
+ software_variant: "{{ item.variant }}"
+ device_types:
+ - product_family: "{{ item.family }}"
+ register: template_result
+ with_items: '{{ template_details }}'
+ tags:
+ - template
diff --git a/ansible_collections/cisco/dnac/playbooks/test_swim_module.yml b/ansible_collections/cisco/dnac/playbooks/test_swim_module.yml
index 0fbbc2045..7e6e50007 100644
--- a/ansible_collections/cisco/dnac/playbooks/test_swim_module.yml
+++ b/ansible_collections/cisco/dnac/playbooks/test_swim_module.yml
@@ -1,14 +1,12 @@
-- hosts: dnac_servers
- vars_files:
- - credentials_245.yml
- - image_details.yml #Contains image and device details
- gather_facts: false
+---
+- name: Import software images, tag them as golden, then distribute and activate them on Cisco DNA Center
+ hosts: localhost
connection: local
+ gather_facts: no
+ vars_files:
+ - "input_swim.yml" #Contains image and device details
+ - "credentials.yml"
tasks:
-#
-# Project Info Section
-#
-
- name: Import an image, tag it as golden and load it on device
cisco.dnac.swim_intent:
dnac_host: "{{ dnac_host }}"
@@ -19,23 +17,26 @@
dnac_debug: "{{ dnac_debug }}"
dnac_log: true
config:
- - importImageDetails:
- type: "{{ item.import_type }}"
- urlDetails:
- payload:
- - sourceURL: "{{ item.url_source }}"
- isThirdParty: false
- taggingDetails:
- deviceRole: "{{ item.device_role }}"
- deviceFamilyName: "{{ item.device_family_name }}"
+ - import_image_details:
+ type: "{{ item.type }}"
+ url_details:
+ payload: "{{ item.url_details.payload }}"
+ tagging_details:
+ device_role: "{{ item.device_role }}"
+ device_family_name: "{{ item.device_family_name }}"
+ device_type: "{{item.device_type}}"
tagging: true
- imageDistributionDetails:
- deviceSerialNumber: "{{ item.device_serial_number }}"
- imageActivationDetails:
- scehduleValidate: false
- activateLowerImageVersion: true
- deviceSerialNumber: "{{ item.device_serial_number }}"
- distributeIfNeeded: true
+ image_distribution_details:
+ site_name: "{{item.site_name}}"
+ device_role: "{{ item.device_role }}"
+ device_family_name: "{{ item.device_family_name }}"
+ image_activation_details:
+ site_name: "{{item.site_name}}"
+ device_role: "{{ item.device_role }}"
+ device_family_name: "{{ item.device_family_name }}"
+ scehdule_validate: false
+ distribute_if_needed: true
+
with_items: '{{ image_details }}'
tags:
- swim
diff --git a/ansible_collections/cisco/dnac/playbooks/tests/test-1592357065255.csv b/ansible_collections/cisco/dnac/playbooks/tests/test-1592357065255.csv
new file mode 100644
index 000000000..95d096fa3
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/tests/test-1592357065255.csv
@@ -0,0 +1,3 @@
+Device IP,Device Name,DeviceType,Target Image,Target Version,Image Type,Reboot,Check Type,Description,PreCheck Status,LastChecked
+10.10.20.51,3504_WLC,Wireless Controller,AIR-CT3504-K9-8-10-121-0,8.10.121.0,WLC_SW,Yes,Service Entitlement Check,Service Entitlement Check: WARNING : Unable to validate license for Device. Not enough information to validate.,WARNING,2020-06-17 01:24:04,
+10.10.20.51,3504_WLC,Wireless Controller,AIR-CT3504-K9-8-10-121-0,8.10.121.0,WLC_SW,Yes,Flash check,Upgrade Analysis feature is not supported for Cisco Interfaces and Modules/Content Networking/Third Party devices/Wireless Controllers excluding Cisco 5760 Series; : ,SUCCESS,2020-06-17 01:24:04,
diff --git a/ansible_collections/cisco/dnac/playbooks/user_info.yml b/ansible_collections/cisco/dnac/playbooks/user_info.yml
new file mode 100644
index 000000000..da08c51c9
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/user_info.yml
@@ -0,0 +1,18 @@
+- hosts: dnac_servers
+ vars_files:
+ - credentials.yml
+ gather_facts: false
+ connection: local
+ tasks:
+ - name: Get all User
+ cisco.dnac.user_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ #headers: "{{my_headers | from_json}}"
+ invokeSource: string
+ register: result \ No newline at end of file
diff --git a/ansible_collections/cisco/dnac/plugins/action/cli_credential.py b/ansible_collections/cisco/dnac/plugins/action/cli_credential.py
index d10c90049..2b42190fa 100644
--- a/ansible_collections/cisco/dnac/plugins/action/cli_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/action/cli_credential.py
@@ -127,7 +127,7 @@ class CliCredential(object):
id_exists = False
name_exists = False
o_id = self.new_object.get("id")
- name = self.new_object.get("description") or self.new_object.get("username")
+ name = self.new_object.get("username") or self.new_object.get("description")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
diff --git a/ansible_collections/cisco/dnac/plugins/action/http_read_credential.py b/ansible_collections/cisco/dnac/plugins/action/http_read_credential.py
index 1d1ac0acd..c73050be8 100644
--- a/ansible_collections/cisco/dnac/plugins/action/http_read_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/action/http_read_credential.py
@@ -130,7 +130,7 @@ class HttpReadCredential(object):
id_exists = False
name_exists = False
o_id = self.new_object.get("id")
- name = self.new_object.get("description") or self.new_object.get("username")
+ name = self.new_object.get("username") or self.new_object.get("description")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
diff --git a/ansible_collections/cisco/dnac/plugins/action/http_write_credential.py b/ansible_collections/cisco/dnac/plugins/action/http_write_credential.py
index 5a63c0d5e..7ae396a1b 100644
--- a/ansible_collections/cisco/dnac/plugins/action/http_write_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/action/http_write_credential.py
@@ -130,7 +130,7 @@ class HttpWriteCredential(object):
id_exists = False
name_exists = False
o_id = self.new_object.get("id")
- name = self.new_object.get("description") or self.new_object.get("username")
+ name = self.new_object.get("username") or self.new_object.get("description")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
diff --git a/ansible_collections/cisco/dnac/plugins/action/netconf_credential.py b/ansible_collections/cisco/dnac/plugins/action/netconf_credential.py
index 08aa9d19c..5ea30373d 100644
--- a/ansible_collections/cisco/dnac/plugins/action/netconf_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/action/netconf_credential.py
@@ -121,7 +121,7 @@ class NetconfCredential(object):
id_exists = False
name_exists = False
o_id = self.new_object.get("id")
- name = self.new_object.get("description") or self.new_object.get("username")
+ name = self.new_object.get("username") or self.new_object.get("description")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
diff --git a/ansible_collections/cisco/dnac/plugins/action/pnp_device_claim_to_site.py b/ansible_collections/cisco/dnac/plugins/action/pnp_device_claim_to_site.py
index 1ec73115c..6bceab4a1 100644
--- a/ansible_collections/cisco/dnac/plugins/action/pnp_device_claim_to_site.py
+++ b/ansible_collections/cisco/dnac/plugins/action/pnp_device_claim_to_site.py
@@ -29,7 +29,7 @@ argument_spec.update(dict(
siteId=dict(type="str"),
type=dict(type="str"),
imageInfo=dict(type="dict"),
- configInfo=dict(type="list"),
+ configInfo=dict(type="dict"),
rfProfile=dict(type="str"),
staticIP=dict(type="str"),
subnetMask=dict(type="str"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/role_permissions_info.py b/ansible_collections/cisco/dnac/plugins/action/role_permissions_info.py
index 39728ba46..a1b758c74 100644
--- a/ansible_collections/cisco/dnac/plugins/action/role_permissions_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/role_permissions_info.py
@@ -78,8 +78,8 @@ class ActionModule(ActionBase):
dnac = DNACSDK(params=self._task.args)
response = dnac.exec(
- family="userand_roles",
- function='get_permissions_ap_i',
+ family="user_and_roles",
+ function='get_permissions_api',
params=self.get_object(self._task.args),
)
self._result.update(dict(dnac_response=response))
diff --git a/ansible_collections/cisco/dnac/plugins/action/roles_info.py b/ansible_collections/cisco/dnac/plugins/action/roles_info.py
index 380c9c687..4283de11b 100644
--- a/ansible_collections/cisco/dnac/plugins/action/roles_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/roles_info.py
@@ -78,8 +78,8 @@ class ActionModule(ActionBase):
dnac = DNACSDK(params=self._task.args)
response = dnac.exec(
- family="userand_roles",
- function='get_roles_ap_i',
+ family="user_and_roles",
+ function='get_roles_api',
params=self.get_object(self._task.args),
)
self._result.update(dict(dnac_response=response))
diff --git a/ansible_collections/cisco/dnac/plugins/action/snmpv2_read_community_credential.py b/ansible_collections/cisco/dnac/plugins/action/snmpv2_read_community_credential.py
index 60edf5c83..cda2e0c06 100644
--- a/ansible_collections/cisco/dnac/plugins/action/snmpv2_read_community_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/action/snmpv2_read_community_credential.py
@@ -115,7 +115,7 @@ class Snmpv2ReadCommunityCredential(object):
id_exists = False
name_exists = False
o_id = self.new_object.get("id")
- name = self.new_object.get("description") or self.new_object.get("username")
+ name = self.new_object.get("username") or self.new_object.get("description")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
diff --git a/ansible_collections/cisco/dnac/plugins/action/snmpv2_write_community_credential.py b/ansible_collections/cisco/dnac/plugins/action/snmpv2_write_community_credential.py
index 97982d2c7..2ff190d75 100644
--- a/ansible_collections/cisco/dnac/plugins/action/snmpv2_write_community_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/action/snmpv2_write_community_credential.py
@@ -115,7 +115,7 @@ class Snmpv2WriteCommunityCredential(object):
id_exists = False
name_exists = False
o_id = self.new_object.get("id")
- name = self.new_object.get("description") or self.new_object.get("username")
+ name = self.new_object.get("username") or self.new_object.get("description")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
diff --git a/ansible_collections/cisco/dnac/plugins/action/snmpv3_credential.py b/ansible_collections/cisco/dnac/plugins/action/snmpv3_credential.py
index 480c82e66..313087715 100644
--- a/ansible_collections/cisco/dnac/plugins/action/snmpv3_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/action/snmpv3_credential.py
@@ -137,7 +137,7 @@ class Snmpv3Credential(object):
id_exists = False
name_exists = False
o_id = self.new_object.get("id")
- name = self.new_object.get("description") or self.new_object.get("username")
+ name = self.new_object.get("username") or self.new_object.get("description")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
@@ -167,6 +167,9 @@ class Snmpv3Credential(object):
("snmpMode", "snmpMode"),
("username", "username"),
]
+
+ print("requested_obj: ", requested_obj)
+ print("current_obj: ", current_obj)
# Method 1. Params present in request (Ansible) obj are the same as the current (ISE) params
# If any does not have eq params, it requires update
return any(not dnac_compare_equality(current_obj.get(dnac_param),
diff --git a/ansible_collections/cisco/dnac/plugins/action/swim_import_local.py b/ansible_collections/cisco/dnac/plugins/action/swim_import_local.py
index 57946e44e..b5e3b8a6c 100644
--- a/ansible_collections/cisco/dnac/plugins/action/swim_import_local.py
+++ b/ansible_collections/cisco/dnac/plugins/action/swim_import_local.py
@@ -20,6 +20,7 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
DNACSDK,
dnac_argument_spec,
)
+import os
# Get common arguements specification
argument_spec = dnac_argument_spec()
@@ -41,7 +42,8 @@ required_together = []
class ActionModule(ActionBase):
def __init__(self, *args, **kwargs):
if not ANSIBLE_UTILS_IS_INSTALLED:
- raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ raise AnsibleActionFail(
+ "ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
super(ActionModule, self).__init__(*args, **kwargs)
self._supports_async = False
self._supports_check_mode = False
@@ -70,8 +72,12 @@ class ActionModule(ActionBase):
is_third_party=params.get("isThirdParty"),
third_party_vendor=params.get("thirdPartyVendor"),
third_party_image_family=params.get("thirdPartyImageFamily"),
- third_party_application_type=params.get("thirdPartyApplicationType"),
+ third_party_application_type=params.get(
+ "thirdPartyApplicationType"),
file_path=params.get("filePath"),
+ multipart_fields={'file': (os.path.basename(params.get(
+ "filePath")), open(params.get("filePath"), 'rb'))},
+ multipart_monitor_callback=None
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/tag_member.py b/ansible_collections/cisco/dnac/plugins/action/tag_member.py
index e92fdf505..54ae99e17 100644
--- a/ansible_collections/cisco/dnac/plugins/action/tag_member.py
+++ b/ansible_collections/cisco/dnac/plugins/action/tag_member.py
@@ -54,6 +54,7 @@ class TagMember(object):
object=params.get("object"),
id=params.get("id"),
member_id=params.get("memberId"),
+ member_type=params.get("memberType"),
)
def create_params(self):
@@ -81,7 +82,8 @@ class TagMember(object):
items = self.dnac.exec(
family="tag",
function="get_tag_members_by_id",
- params={"id": id}
+ params={"id": id, "memberType": self.new_object.get(
+ 'member_type'), }
)
if isinstance(items, dict):
if 'response' in items:
@@ -108,7 +110,8 @@ class TagMember(object):
_id = prev_obj.get("id")
_id = _id or prev_obj.get("memberId")
if id_exists and name_exists and o_id != _id:
- raise InconsistentParameters("The 'id' and 'name' params don't refer to the same object")
+ raise InconsistentParameters(
+ "The 'id' and 'name' params don't refer to the same object")
if _id:
self.new_object.update(dict(id=_id))
self.new_object.update(dict(member_id=_id))
@@ -164,7 +167,8 @@ class TagMember(object):
class ActionModule(ActionBase):
def __init__(self, *args, **kwargs):
if not ANSIBLE_UTILS_IS_INSTALLED:
- raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ raise AnsibleActionFail(
+ "ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
super(ActionModule, self).__init__(*args, **kwargs)
self._supports_async = False
self._supports_check_mode = False
diff --git a/ansible_collections/cisco/dnac/plugins/action/user.py b/ansible_collections/cisco/dnac/plugins/action/user.py
index b9c8d5c4e..1896e57ff 100644
--- a/ansible_collections/cisco/dnac/plugins/action/user.py
+++ b/ansible_collections/cisco/dnac/plugins/action/user.py
@@ -91,8 +91,8 @@ class User(object):
# NOTE: Does not have a get by name method, using get all
try:
items = self.dnac.exec(
- family="userand_roles",
- function="get_users_ap_i",
+ family="user_and_roles",
+ function="get_users_api",
params=self.get_all_params(name=name),
)
if isinstance(items, dict):
@@ -148,8 +148,8 @@ class User(object):
def create(self):
result = self.dnac.exec(
- family="userand_roles",
- function="add_user_ap_i",
+ family="user_and_roles",
+ function="add_user_api",
params=self.create_params(),
op_modifies=True,
)
@@ -160,8 +160,8 @@ class User(object):
name = self.new_object.get("name")
result = None
result = self.dnac.exec(
- family="userand_roles",
- function="update_user_ap_i",
+ family="user_and_roles",
+ function="update_user_api",
params=self.update_all_params(),
op_modifies=True,
)
diff --git a/ansible_collections/cisco/dnac/plugins/action/user_info.py b/ansible_collections/cisco/dnac/plugins/action/user_info.py
index eb9ccc38e..7a5906fcc 100644
--- a/ansible_collections/cisco/dnac/plugins/action/user_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/user_info.py
@@ -80,8 +80,8 @@ class ActionModule(ActionBase):
dnac = DNACSDK(params=self._task.args)
response = dnac.exec(
- family="userand_roles",
- function='get_users_ap_i',
+ family="user_and_roles",
+ function='get_users_api',
params=self.get_object(self._task.args),
)
self._result.update(dict(dnac_response=response))
diff --git a/ansible_collections/cisco/dnac/plugins/action/users_external_servers_info.py b/ansible_collections/cisco/dnac/plugins/action/users_external_servers_info.py
index 1436338cc..b29ce6bb2 100644
--- a/ansible_collections/cisco/dnac/plugins/action/users_external_servers_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/users_external_servers_info.py
@@ -80,8 +80,8 @@ class ActionModule(ActionBase):
dnac = DNACSDK(params=self._task.args)
response = dnac.exec(
- family="userand_roles",
- function='get_external_authentication_servers_ap_i',
+ family="user_and_roles",
+ function='get_external_authentication_servers_api',
params=self.get_object(self._task.args),
)
self._result.update(dict(dnac_response=response))
diff --git a/ansible_collections/cisco/dnac/plugins/doc_fragments/intent_params.py b/ansible_collections/cisco/dnac/plugins/doc_fragments/intent_params.py
index fe95d684d..7e61fedc8 100644
--- a/ansible_collections/cisco/dnac/plugins/doc_fragments/intent_params.py
+++ b/ansible_collections/cisco/dnac/plugins/doc_fragments/intent_params.py
@@ -50,10 +50,60 @@ options:
default: false
dnac_log:
description:
- - Flag for logging playbook execution details.
- If set to true the log file will be created at the location of the execution with the name dnac.log
+ - Flag to enable/disable playbook execution logging.
+ - When true and dnac_log_file_path is provided,
+ - Create the log file at the execution location with the specified name.
+ - When true and dnac_log_file_path is not provided,
+ - Create the log file at the execution location with the name 'dnac.log'.
+ - When false,
+ - Logging is disabled.
+ - If the log file doesn't exist,
+ - It is created in append or write mode based on the "dnac_log_append" flag.
+ - If the log file exists,
+ - It is overwritten or appended based on the "dnac_log_append" flag.
type: bool
default: false
+ dnac_log_level:
+ description:
+ - Sets the threshold for log level. Messages with a level equal to or higher than
+ this will be logged. Levels are listed in order of severity [CRITICAL, ERROR, WARNING, INFO, DEBUG].
+ - CRITICAL indicates serious errors halting the program. Displays only CRITICAL messages.
+ - ERROR indicates problems preventing a function. Displays ERROR and CRITICAL messages.
+ - WARNING indicates potential future issues. Displays WARNING, ERROR, CRITICAL messages.
+ - INFO tracks normal operation. Displays INFO, WARNING, ERROR, CRITICAL messages.
+ - DEBUG provides detailed diagnostic info. Displays all log messages.
+ type: str
+ default: WARNING
+ dnac_log_file_path:
+ description:
+ - Governs logging. Logs are recorded if dnac_log is True.
+ - If path is not specified,
+ - When 'dnac_log_append' is True, 'dnac.log' is generated in the
+ current Ansible directory; logs are appended.
+ - When 'dnac_log_append' is False, 'dnac.log' is generated; logs
+ are overwritten.
+ - If path is specified,
+ - When 'dnac_log_append' is True, the file opens in append mode.
+ - When 'dnac_log_append' is False, the file opens in write (w) mode.
+ - In shared file scenarios, without append mode, content is
+ overwritten after each module execution.
+ - For a shared log file, set append to False for the 1st module
+ (to overwrite); for subsequent modules, set append to True.
+ type: str
+ default: dnac.log
+ dnac_log_append:
+ description: Determines the mode of the file. Set to True for 'append' mode. Set to False for 'write' mode.
+ type: bool
+ default: True
+ dnac_api_task_timeout:
+ description: Defines the timeout in seconds for API calls to retrieve task details. If the task details
+ are not received within this period, the process will end, and a timeout notification will be logged.
+ type: int
+ default: 1200
+ dnac_task_poll_interval:
+ description: Specifies the interval in seconds between successive calls to the API to retrieve task details.
+ type: int
+ default: 2
validate_response_schema:
description:
- Flag for Cisco DNA Center SDK to enable the validation of request bodies against a JSON schema.
diff --git a/ansible_collections/cisco/dnac/plugins/doc_fragments/workflow_manager_params.py b/ansible_collections/cisco/dnac/plugins/doc_fragments/workflow_manager_params.py
new file mode 100644
index 000000000..8656bcb28
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/doc_fragments/workflow_manager_params.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+__author__ = ['Madhan Sankaranarayanan, Muthu Rakesh']
+
+
+class ModuleDocFragment(object):
+
+ # Standard files documentation fragment
+ DOCUMENTATION = r'''
+options:
+ dnac_host:
+ description:
+ - The hostname of the Cisco Catalyst Center.
+ type: str
+ required: true
+ dnac_port:
+ description:
+ - Specifies the port number associated with the Cisco Catalyst Center.
+ type: str
+ default: '443'
+ dnac_username:
+ description:
+ - The username for authentication at the Cisco Catalyst Center.
+ type: str
+ default: admin
+ aliases: [ user ]
+ dnac_password:
+ description:
+ - The password for authentication at the Cisco Catalyst Center.
+ type: str
+ dnac_verify:
+ description:
+ - Flag to enable or disable SSL certificate verification.
+ type: bool
+ default: true
+ dnac_version:
+ description:
+ - Specifies the version of the Cisco Catalyst Center that the SDK should use.
+ type: str
+ default: 2.2.3.3
+ dnac_debug:
+ description:
+ - Indicates whether debugging is enabled in the Cisco Catalyst Center SDK.
+ type: bool
+ default: false
+ dnac_log:
+ description:
+ - Flag to enable/disable playbook execution logging.
+ - When true and dnac_log_file_path is provided,
+ - Create the log file at the execution location with the specified name.
+ - When true and dnac_log_file_path is not provided,
+ - Create the log file at the execution location with the name 'dnac.log'.
+ - When false,
+ - Logging is disabled.
+ - If the log file doesn't exist,
+ - It is created in append or write mode based on the "dnac_log_append" flag.
+ - If the log file exists,
+ - It is overwritten or appended based on the "dnac_log_append" flag.
+ type: bool
+ default: false
+ dnac_log_level:
+ description:
+ - Sets the threshold for log level. Messages with a level equal to or higher than
+ this will be logged. Levels are listed in order of severity [CRITICAL, ERROR, WARNING, INFO, DEBUG].
+ - CRITICAL indicates serious errors halting the program. Displays only CRITICAL messages.
+ - ERROR indicates problems preventing a function. Displays ERROR and CRITICAL messages.
+ - WARNING indicates potential future issues. Displays WARNING, ERROR, CRITICAL messages.
+ - INFO tracks normal operation. Displays INFO, WARNING, ERROR, CRITICAL messages.
+ - DEBUG provides detailed diagnostic info. Displays all log messages.
+ type: str
+ default: WARNING
+ dnac_log_file_path:
+ description:
+ - Governs logging. Logs are recorded if dnac_log is True.
+ - If path is not specified,
+ - When 'dnac_log_append' is True, 'dnac.log' is generated in the
+ current Ansible directory; logs are appended.
+ - When 'dnac_log_append' is False, 'dnac.log' is generated; logs
+ are overwritten.
+ - If path is specified,
+ - When 'dnac_log_append' is True, the file opens in append mode.
+ - When 'dnac_log_append' is False, the file opens in write (w) mode.
+ - In shared file scenarios, without append mode, content is
+ overwritten after each module execution.
+ - For a shared log file, set append to False for the 1st module
+ (to overwrite); for subsequent modules, set append to True.
+ type: str
+ default: dnac.log
+ dnac_log_append:
+ description: Determines the mode of the file. Set to True for 'append' mode. Set to False for 'write' mode.
+ type: bool
+ default: True
+ validate_response_schema:
+ description:
+ - Flag for Cisco Catalyst Center SDK to enable the validation of request bodies against a JSON schema.
+ type: bool
+ default: true
+ dnac_api_task_timeout:
+ description: Defines the timeout in seconds for API calls to retrieve task details. If the task details
+ are not received within this period, the process will end, and a timeout notification will be logged.
+ type: int
+ default: 1200
+ dnac_task_poll_interval:
+ description: Specifies the interval in seconds between successive calls to the API to retrieve task details.
+ type: int
+ default: 2
+notes:
+ - "Does not support C(check_mode)"
+ - "The plugin runs on the control node and does not use any ansible connection plugins instead embedded connection manager from Cisco Catalyst Center SDK"
+ - "The parameters starting with dnac_ are used by the Cisco Catalyst Center Python SDK to establish the connection"
+'''
diff --git a/ansible_collections/cisco/dnac/plugins/module_utils/dnac.py b/ansible_collections/cisco/dnac/plugins/module_utils/dnac.py
index e9d61527d..a12e7eaf4 100644
--- a/ansible_collections/cisco/dnac/plugins/module_utils/dnac.py
+++ b/ansible_collections/cisco/dnac/plugins/module_utils/dnac.py
@@ -14,6 +14,7 @@ else:
DNAC_SDK_IS_INSTALLED = True
from ansible.module_utils._text import to_native
from ansible.module_utils.common import validation
+from abc import ABCMeta, abstractmethod
try:
import logging
except ImportError:
@@ -21,17 +22,468 @@ except ImportError:
else:
LOGGING_IN_STANDARD = True
import os.path
-import datetime
+import copy
+import json
+# import datetime
import inspect
+import re
+
+
+class DnacBase():
+
+ """Class contains members which can be reused for all intent modules"""
+
+ __metaclass__ = ABCMeta
+ __is_log_init = False
+
+ def __init__(self, module):
+ self.module = module
+ self.params = module.params
+ self.config = copy.deepcopy(module.params.get("config"))
+ self.have = {}
+ self.want = {}
+ self.validated_config = []
+ self.msg = ""
+ self.status = "success"
+ dnac_params = self.get_dnac_params(self.params)
+ self.dnac = DNACSDK(params=dnac_params)
+ self.dnac_apply = {'exec': self.dnac._exec}
+ self.get_diff_state_apply = {'merged': self.get_diff_merged,
+ 'deleted': self.get_diff_deleted,
+ 'replaced': self.get_diff_replaced,
+ 'overridden': self.get_diff_overridden,
+ 'gathered': self.get_diff_gathered,
+ 'rendered': self.get_diff_rendered,
+ 'parsed': self.get_diff_parsed
+ }
+ self.verify_diff_state_apply = {'merged': self.verify_diff_merged,
+ 'deleted': self.verify_diff_deleted,
+ 'replaced': self.verify_diff_replaced,
+ 'overridden': self.verify_diff_overridden,
+ 'gathered': self.verify_diff_gathered,
+ 'rendered': self.verify_diff_rendered,
+ 'parsed': self.verify_diff_parsed
+ }
+ self.dnac_log = dnac_params.get("dnac_log")
+
+ if self.dnac_log and not DnacBase.__is_log_init:
+ self.dnac_log_level = dnac_params.get("dnac_log_level") or 'WARNING'
+ self.dnac_log_level = self.dnac_log_level.upper()
+ self.validate_dnac_log_level()
+ self.dnac_log_file_path = dnac_params.get("dnac_log_file_path") or 'dnac.log'
+ self.validate_dnac_log_file_path()
+ self.dnac_log_mode = 'w' if not dnac_params.get("dnac_log_append") else 'a'
+ self.setup_logger('logger')
+ self.logger = logging.getLogger('logger')
+ DnacBase.__is_log_init = True
+ self.log('Logging configured and initiated', "DEBUG")
+ elif not self.dnac_log:
+ # If dnac_log is False, return an empty logger
+ self.logger = logging.getLogger('empty_logger')
+
+ self.log('Cisco Catalyst Center parameters: {0}'.format(dnac_params), "DEBUG")
+ self.supported_states = ["merged", "deleted", "replaced", "overridden", "gathered", "rendered", "parsed"]
+ self.result = {"changed": False, "diff": [], "response": [], "warnings": []}
+
+ @abstractmethod
+ def validate_input(self):
+ if not self.config:
+ self.msg = "config not available in playbook for validation"
+ self.status = "failed"
+ return self
+
+ def get_diff_merged(self):
+ # Implement logic to merge the resource configuration
+ self.merged = True
+ return self
+
+ def get_diff_deleted(self):
+ # Implement logic to delete the resource
+ self.deleted = True
+ return self
+
+ def get_diff_replaced(self):
+ # Implement logic to replace the resource
+ self.replaced = True
+ return self
+
+ def get_diff_overridden(self):
+ # Implement logic to overwrite the resource
+ self.overridden = True
+ return self
+
+ def get_diff_gathered(self):
+ # Implement logic to gather data about the resource
+ self.gathered = True
+ return self
+
+ def get_diff_rendered(self):
+ # Implement logic to render a configuration template
+ self.rendered = True
+ return self
+
+ def get_diff_parsed(self):
+ # Implement logic to parse a configuration file
+ self.parsed = True
+ return self
+
+ def verify_diff_merged(self):
+ # Implement logic to verify the merged resource configuration
+ self.merged = True
+ return self
+
+ def verify_diff_deleted(self):
+ # Implement logic to verify the deleted resource
+ self.deleted = True
+ return self
+
+ def verify_diff_replaced(self):
+ # Implement logic to verify the replaced resource
+ self.replaced = True
+ return self
+
+ def verify_diff_overridden(self):
+ # Implement logic to verify the overwritten resource
+ self.overridden = True
+ return self
+
+ def verify_diff_gathered(self):
+ # Implement logic to verify the gathered data about the resource
+ self.gathered = True
+ return self
+
+ def verify_diff_rendered(self):
+ # Implement logic to verify the rendered configuration template
+ self.rendered = True
+ return self
+
+ def verify_diff_parsed(self):
+ # Implement logic to verify the parsed configuration file
+ self.parsed = True
+ return self
+
+ def setup_logger(self, logger_name):
+ """Set up a logger with specified name and configuration based on dnac_log_level"""
+ level_mapping = {
+ 'INFO': logging.INFO,
+ 'DEBUG': logging.DEBUG,
+ 'WARNING': logging.WARNING,
+ 'ERROR': logging.ERROR,
+ 'CRITICAL': logging.CRITICAL
+ }
+ level = level_mapping.get(self.dnac_log_level, logging.WARNING)
+
+ logger = logging.getLogger(logger_name)
+ # formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(module)s: %(funcName)s: %(lineno)d --- %(message)s', datefmt='%m-%d-%Y %H:%M:%S')
+ formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', datefmt='%m-%d-%Y %H:%M:%S')
+
+ file_handler = logging.FileHandler(self.dnac_log_file_path, mode=self.dnac_log_mode)
+ file_handler.setFormatter(formatter)
+
+ logger.setLevel(level)
+ logger.addHandler(file_handler)
+
+ def validate_dnac_log_level(self):
+ """Validates if the logging level is string and of expected value"""
+ if self.dnac_log_level not in ('INFO', 'DEBUG', 'WARNING', 'ERROR', 'CRITICAL'):
+ raise ValueError("Invalid log level: 'dnac_log_level:{0}'".format(self.dnac_log_level))
+
+ def validate_dnac_log_file_path(self):
+ """
+ Validates the specified log file path, ensuring it is either absolute or relative,
+ the directory exists, and has a .log extension.
+ """
+ # Convert the path to absolute if it's relative
+ dnac_log_file_path = os.path.abspath(self.dnac_log_file_path)
+
+ # Validate if the directory exists
+ log_directory = os.path.dirname(dnac_log_file_path)
+ if not os.path.exists(log_directory):
+ raise FileNotFoundError("The directory for log file '{0}' does not exist.".format(dnac_log_file_path))
+
+ def log(self, message, level="WARNING", frameIncrement=0):
+ """Logs formatted messages with specified log level and incrementing the call stack frame
+ Args:
+ self (obj, required): An instance of the DnacBase Class.
+ message (str, required): The log message to be recorded.
+ level (str, optional): The log level, default is "WARNING".
+ The log level can be one of 'DEBUG', 'INFO', 'WARNING', 'ERROR', or 'CRITICAL'.
+ """
+
+ if self.dnac_log:
+ # of.write("---- %s ---- %s@%s ---- %s \n" % (d, info.lineno, info.function, msg))
+ # message = "Module: " + self.__class__.__name__ + ", " + message
+ class_name = self.__class__.__name__
+ callerframerecord = inspect.stack()[1 + frameIncrement]
+ frame = callerframerecord[0]
+ info = inspect.getframeinfo(frame)
+ log_message = " %s: %s: %s: %s \n" % (class_name, info.function, info.lineno, message)
+ log_method = getattr(self.logger, level.lower())
+ log_method(log_message)
+
+ def check_return_status(self):
+ """API to check the return status value and exit/fail the module"""
+
+ # self.log("status: {0}, msg:{1}".format(self.status, self.msg), frameIncrement=1)
+ self.log("status: {0}, msg: {1}".format(self.status, self.msg), "DEBUG")
+ if "failed" in self.status:
+ self.module.fail_json(msg=self.msg, response=[])
+ elif "exited" in self.status:
+ self.module.exit_json(**self.result)
+ elif "invalid" in self.status:
+ self.module.fail_json(msg=self.msg, response=[])
+
+ def is_valid_password(self, password):
+ """
+ Check if a password is valid.
+ Args:
+ self (object): An instance of a class that provides access to Cisco Catalyst Center.
+ password (str): The password to be validated.
+ Returns:
+ bool: True if the password is valid, False otherwise.
+ Description:
+ The function checks the validity of a password based on the following criteria:
+ - Minimum 8 characters.
+ - At least one lowercase letter.
+ - At least one uppercase letter.
+ - At least one digit.
+ - At least one special character
+ """
+
+ pattern = r"^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[-=\\;,./~!@#$%^&*()_+{}[\]|:?]).{8,}$"
+
+ return re.match(pattern, password) is not None
+
+ def get_dnac_params(self, params):
+ """Store the Cisco Catalyst Center parameters from the playbook"""
+
+ dnac_params = {"dnac_host": params.get("dnac_host"),
+ "dnac_port": params.get("dnac_port"),
+ "dnac_username": params.get("dnac_username"),
+ "dnac_password": params.get("dnac_password"),
+ "dnac_verify": params.get("dnac_verify"),
+ "dnac_debug": params.get("dnac_debug"),
+ "dnac_log": params.get("dnac_log"),
+ "dnac_log_level": params.get("dnac_log_level"),
+ "dnac_log_file_path": params.get("dnac_log_file_path"),
+ "dnac_log_append": params.get("dnac_log_append")
+ }
+ return dnac_params
+
+ def get_task_details(self, task_id):
+ """
+ Get the details of a specific task in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class that provides access to Cisco Catalyst Center.
+ task_id (str): The unique identifier of the task for which you want to retrieve details.
+ Returns:
+ dict or None: A dictionary containing detailed information about the specified task,
+ or None if the task with the given task_id is not found.
+ Description:
+ If the task with the specified task ID is not found in Cisco Catalyst Center, this function will return None.
+ """
+ result = None
+ response = self.dnac._exec(
+ family="task",
+ function='get_task_by_id',
+ params={"task_id": task_id}
+ )
+
+ self.log('Task Details: {0}'.format(str(response)), 'DEBUG')
+ self.log("Retrieving task details by the API 'get_task_by_id' using task ID: {0}, Response: {1}".format(task_id, response), "DEBUG")
+
+ if response and isinstance(response, dict):
+ result = response.get('response')
+
+ return result
+
+ def check_task_response_status(self, response, validation_string, data=False):
+ """
+ Poll the task identified in the API response until its progress matches validation_string or it reports an error.
+
+ Parameters:
+ self - The current object details.
+ response (dict) - API response.
+ validation_string (string) - String used to match the progress status.
+
+ Returns:
+ self
+ """
+
+ if not response:
+ self.msg = "response is empty"
+ self.status = "exited"
+ return self
+
+ if not isinstance(response, dict):
+ self.msg = "response is not a dictionary"
+ self.status = "exited"
+ return self
+
+ response = response.get("response")
+ if response.get("errorcode") is not None:
+ self.msg = response.get("response").get("detail")
+ self.status = "failed"
+ return self
+
+ task_id = response.get("taskId")
+ while True:
+ task_details = self.get_task_details(task_id)
+ self.log('Getting task details from task ID {0}: {1}'.format(task_id, task_details), "DEBUG")
+
+ if task_details.get("isError") is True:
+ if task_details.get("failureReason"):
+ self.msg = str(task_details.get("failureReason"))
+ else:
+ self.msg = str(task_details.get("progress"))
+ self.status = "failed"
+ break
+
+ if validation_string in task_details.get("progress").lower():
+ self.result['changed'] = True
+ if data is True:
+ self.msg = task_details.get("data")
+ self.status = "success"
+ break
+
+ self.log("progress set to {0} for taskid: {1}".format(task_details.get('progress'), task_id), "DEBUG")
+
+ return self
+
+ def reset_values(self):
+ """Reset all neccessary attributes to default values"""
+
+ self.have.clear()
+ self.want.clear()
+
+ def get_execution_details(self, execid):
+ """
+ Get the execution details of an API
+
+ Parameters:
+ execid (str) - Id for API execution
+
+ Returns:
+ response (dict) - Status for API execution
+ """
+
+ self.log("Execution Id: {0}".format(execid), "DEBUG")
+ response = self.dnac._exec(
+ family="task",
+ function='get_business_api_execution_details',
+ params={"execution_id": execid}
+ )
+ self.log("Response for the current execution: {0}".format(response))
+ return response
+
+ def check_execution_response_status(self, response):
+ """
+ Checks the response status provided by API in the Cisco Catalyst Center
+
+ Parameters:
+ response (dict) - API response
+
+ Returns:
+ self
+ """
+
+ if not response:
+ self.msg = "response is empty"
+ self.status = "failed"
+ return self
+
+ if not isinstance(response, dict):
+ self.msg = "response is not a dictionary"
+ self.status = "failed"
+ return self
+
+ executionid = response.get("executionId")
+ while True:
+ execution_details = self.get_execution_details(executionid)
+ if execution_details.get("status") == "SUCCESS":
+ self.result['changed'] = True
+ self.msg = "Successfully executed"
+ self.status = "success"
+ break
+
+ if execution_details.get("bapiError"):
+ self.msg = execution_details.get("bapiError")
+ self.status = "failed"
+ break
+
+ return self
-def log(msg):
- with open('dnac.log', 'a') as of:
- callerframerecord = inspect.stack()[1]
- frame = callerframerecord[0]
- info = inspect.getframeinfo(frame)
- d = datetime.datetime.now().replace(microsecond=0).isoformat()
- of.write("---- %s ---- %s@%s ---- %s \n" % (d, info.lineno, info.function, msg))
+ def check_string_dictionary(self, task_details_data):
+ """
+ Check whether the input is string dictionary or string.
+
+ Parameters:
+ task_details_data (string) - Input either string dictionary or string.
+
+ Returns:
+ value (dict) - If the input is string dictionary, else returns None.
+ """
+
+ try:
+ value = json.loads(task_details_data)
+ if isinstance(value, dict):
+ return value
+ except json.JSONDecodeError:
+ pass
+ return None
+
+ def camel_to_snake_case(self, config):
+ """
+ Convert camel case keys to snake case keys in the config.
+
+ Parameters:
+ config (list) - Playbook details provided by the user.
+
+ Returns:
+ new_config (list) - Updated config after eliminating the camel cases.
+ """
+
+ if isinstance(config, dict):
+ new_config = {}
+ for key, value in config.items():
+ new_key = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', key).lower()
+ if new_key != key:
+ self.log("{0} will be deprecated soon. Please use {1}.".format(key, new_key), "DEBUG")
+ new_value = self.camel_to_snake_case(value)
+ new_config[new_key] = new_value
+ elif isinstance(config, list):
+ return [self.camel_to_snake_case(item) for item in config]
+ else:
+ return config
+ return new_config
+
+ def update_site_type_key(self, config):
+ """
+ Replace 'site_type' key with 'type' in the config.
+
+ Parameters:
+ config (list or dict) - Configuration details.
+
+ Returns:
+ updated_config (list or dict) - Updated config after replacing the keys.
+ """
+
+ if isinstance(config, dict):
+ new_config = {}
+ for key, value in config.items():
+ if key == "site_type":
+ new_key = "type"
+ else:
+ new_key = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', key).lower()
+ new_value = self.update_site_type_key(value)
+ new_config[new_key] = new_value
+ elif isinstance(config, list):
+ return [self.update_site_type_key(item) for item in config]
+ else:
+ return config
+
+ return new_config
def is_list_complex(x):
@@ -127,6 +579,167 @@ def dnac_argument_spec():
return argument_spec
+def validate_str(item, param_spec, param_name, invalid_params):
+ """
+ This function checks that the input `item` is a valid string and conforms to
+ the constraints specified in `param_spec`. If the string is not valid or does
+ not meet the constraints, an error message is added to `invalid_params`.
+
+ Args:
+ item (str): The input string to be validated.
+ param_spec (dict): The parameter's specification, including validation constraints.
+ param_name (str): The name of the parameter being validated.
+ invalid_params (list): A list to collect validation error messages.
+
+ Returns:
+ str: The validated and possibly normalized string.
+
+ Example `param_spec`:
+ {
+ "type": "str",
+ "length_max": 255 # Optional: maximum allowed length
+ }
+ """
+
+ item = validation.check_type_str(item)
+ if param_spec.get("length_max"):
+ if 1 <= len(item) <= param_spec.get("length_max"):
+ return item
+ else:
+ invalid_params.append(
+ "{0}:{1} : The string exceeds the allowed "
+ "range of max {2} char".format(param_name, item, param_spec.get("length_max"))
+ )
+ return item
+
+
+def validate_int(item, param_spec, param_name, invalid_params):
+ """
+ This function checks that the input `item` is a valid integer and conforms to
+ the constraints specified in `param_spec`. If the integer is not valid or does
+ not meet the constraints, an error message is added to `invalid_params`.
+
+ Args:
+ item (int): The input integer to be validated.
+ param_spec (dict): The parameter's specification, including validation constraints.
+ param_name (str): The name of the parameter being validated.
+ invalid_params (list): A list to collect validation error messages.
+
+ Returns:
+ int: The validated integer.
+
+ Example `param_spec`:
+ {
+ "type": "int",
+ "range_min": 1, # Optional: minimum allowed value
+ "range_max": 100 # Optional: maximum allowed value
+ }
+ """
+
+ item = validation.check_type_int(item)
+ min_value = 1
+ if param_spec.get("range_min") is not None:
+ min_value = param_spec.get("range_min")
+ if param_spec.get("range_max"):
+ if min_value <= item <= param_spec.get("range_max"):
+ return item
+ else:
+ invalid_params.append(
+ "{0}:{1} : The item exceeds the allowed "
+ "range of max {2}".format(param_name, item, param_spec.get("range_max"))
+ )
+ return item
+
+
+def validate_bool(item, param_spec, param_name, invalid_params):
+ """
+ This function checks that the input `item` is a valid boolean value. If it does
+ not represent a valid boolean value, an error message is added to `invalid_params`.
+
+ Args:
+ item (bool): The input boolean value to be validated.
+ param_spec (dict): The parameter's specification, including validation constraints.
+ param_name (str): The name of the parameter being validated.
+ invalid_params (list): A list to collect validation error messages.
+
+ Returns:
+ bool: The validated boolean value.
+ """
+
+ return validation.check_type_bool(item)
+
+
+def validate_list(item, param_spec, param_name, invalid_params):
+ """
+ This function checks if the input `item` is a valid list based on the specified `param_spec`.
+ It also verifies that the elements of the list match the expected data type specified in the
+ `param_spec`. If any validation errors occur, they are appended to the `invalid_params` list.
+
+ Args:
+ item (list): The input list to be validated.
+ param_spec (dict): The parameter's specification, including validation constraints.
+ param_name (str): The name of the parameter being validated.
+ invalid_params (list): A list to collect validation error messages.
+
+ Returns:
+ list: The validated list, potentially normalized based on the specification.
+ """
+
+ try:
+ if param_spec.get("type") == type(item).__name__:
+ keys_list = []
+ for dict_key in param_spec:
+ keys_list.append(dict_key)
+ if len(keys_list) == 1:
+ return validation.check_type_list(item)
+
+ temp_dict = {keys_list[1]: param_spec[keys_list[1]]}
+ try:
+ if param_spec['elements']:
+ get_spec_type = param_spec['type']
+ get_spec_element = param_spec['elements']
+ if type(item).__name__ == get_spec_type:
+ for element in item:
+ if type(element).__name__ != get_spec_element:
+ invalid_params.append(
+ "{0} is not of the same datatype as expected which is {1}".format(element, get_spec_element)
+ )
+ else:
+ invalid_params.append(
+ "{0} is not of the same datatype as expected which is {1}".format(item, get_spec_type)
+ )
+ except Exception as e:
+ item, list_invalid_params = validate_list_of_dicts(item, temp_dict)
+ invalid_params.extend(list_invalid_params)
+ else:
+ invalid_params.append("{0} : is not a valid list".format(item))
+ except Exception as e:
+ invalid_params.append("{0} : comes into the exception".format(e))
+
+ return item
+
+
+def validate_dict(item, param_spec, param_name, invalid_params):
+ """
+ This function checks if the input `item` is a valid dictionary based on the specified `param_spec`.
+ If the dictionary does not match the expected data type specified in the `param_spec`,
+ a validation error is appended to the `invalid_params` list.
+
+ Args:
+ item (dict): The input dictionary to be validated.
+ param_spec (dict): The parameter's specification, including validation constraints.
+ param_name (str): The name of the parameter being validated.
+ invalid_params (list): A list to collect validation error messages.
+
+ Returns:
+ dict: The validated dictionary.
+ """
+
+ if param_spec.get("type") != type(item).__name__:
+ invalid_params.append("{0} : is not a valid dictionary".format(item))
+ return validation.check_type_dict(item)
+
+
def validate_list_of_dicts(param_list, spec, module=None):
"""Validate/Normalize playbook params. Will raise when invalid parameters found.
param_list: a playbook parameter list of dicts
@@ -135,14 +748,19 @@ def validate_list_of_dicts(param_list, spec, module=None):
foo=dict(type='str', default='bar'))
return: list of normalized input data
"""
+
v = validation
normalized = []
invalid_params = []
+
for list_entry in param_list:
valid_params_dict = {}
+ if not spec:
+ # Handle the case when spec becomes empty but param list is still there
+ invalid_params.append("No more spec to validate, but parameters remain")
+ break
for param in spec:
item = list_entry.get(param)
- log(str(item))
if item is None:
if spec[param].get("required"):
invalid_params.append(
@@ -150,58 +768,41 @@ def validate_list_of_dicts(param_list, spec, module=None):
)
else:
item = spec[param].get("default")
+ valid_params_dict[param] = item
+ continue
+ data_type = spec[param].get("type")
+ switch = {
+ "str": validate_str,
+ "int": validate_int,
+ "bool": validate_bool,
+ "list": validate_list,
+ "dict": validate_dict,
+ }
+
+ validator = switch.get(data_type)
+ if validator:
+ item = validator(item, spec[param], param, invalid_params)
else:
- type = spec[param].get("type")
- if type == "str":
- item = v.check_type_str(item)
- if spec[param].get("length_max"):
- if 1 <= len(item) <= spec[param].get("length_max"):
- pass
- else:
- invalid_params.append(
- "{0}:{1} : The string exceeds the allowed "
- "range of max {2} char".format(
- param, item, spec[param].get("length_max")
- )
- )
- elif type == "int":
- item = v.check_type_int(item)
- min_value = 1
- if spec[param].get("range_min") is not None:
- min_value = spec[param].get("range_min")
- if spec[param].get("range_max"):
- if min_value <= item <= spec[param].get("range_max"):
- pass
- else:
- invalid_params.append(
- "{0}:{1} : The item exceeds the allowed "
- "range of max {2}".format(
- param, item, spec[param].get("range_max")
- )
- )
- elif type == "bool":
- item = v.check_type_bool(item)
- elif type == "list":
- item = v.check_type_list(item)
- elif type == "dict":
- item = v.check_type_dict(item)
-
- choice = spec[param].get("choices")
- if choice:
- if item not in choice:
- invalid_params.append(
- "{0} : Invalid choice provided".format(item)
- )
+ invalid_params.append(
+ "{0}:{1} : Unsupported data type {2}.".format(param, item, data_type)
+ )
- no_log = spec[param].get("no_log")
- if no_log:
- if module is not None:
- module.no_log_values.add(item)
- else:
- msg = "\n\n'{0}' is a no_log parameter".format(param)
- msg += "\nAnsible module object must be passed to this "
- msg += "\nfunction to ensure it is not logged\n\n"
- raise Exception(msg)
+ choice = spec[param].get("choices")
+ if choice:
+ if item not in choice:
+ invalid_params.append(
+ "{0} : Invalid choice provided".format(item)
+ )
+
+ no_log = spec[param].get("no_log")
+ if no_log:
+ if module is not None:
+ module.no_log_values.add(item)
+ else:
+ msg = "\n\n'{0}' is a no_log parameter".format(param)
+ msg += "\nAnsible module object must be passed to this "
+ msg += "\nfunction to ensure it is not logged\n\n"
+ raise Exception(msg)
valid_params_dict[param] = item
normalized.append(valid_params_dict)
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_credential_intent.py b/ansible_collections/cisco/dnac/plugins/modules/device_credential_intent.py
new file mode 100644
index 000000000..8e2f41384
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_credential_intent.py
@@ -0,0 +1,2618 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+"""Ansible module to perform operations on device credentials in Cisco DNA Center."""
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ['Muthu Rakesh, Madhan Sankaranarayanan']
+
+DOCUMENTATION = r"""
+---
+module: device_credential_intent
+short_description: Resource module for Global Device Credentials and Assigning Credentials to sites.
+description:
+- Manage operations on Global Device Credentials and Assigning Credentials to sites.
+- API to create global device credentials.
+- API to update global device credentials.
+- API to delete global device credentials.
+- API to assign the device credential to the site.
+version_added: '6.7.0'
+extends_documentation_fragment:
+ - cisco.dnac.intent_params
+author: Muthu Rakesh (@MUTHU-RAKESH-27)
+ Madhan Sankaranarayanan (@madhansansel)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco DNA Center after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Cisco DNA Center after module completion.
+ type: str
+ choices: [ merged, deleted ]
+ default: merged
+ config:
+ description:
+ - List of details of global device credentials and site names.
+ type: list
+ elements: dict
+ required: true
+ suboptions:
+ global_credential_details:
+ description: Manages global device credentials
+ type: dict
+ suboptions:
+ cli_credential:
+ description: Global Credential V2's cliCredential.
+ type: list
+ elements: dict
+ suboptions:
+ description:
+ description: Description. Required for creating the credential.
+ type: str
+ enable_password:
+ description:
+ - cli_credential credential Enable Password.
+ - Password cannot contain spaces or angle brackets (< >)
+ type: str
+ id:
+ description: Credential Id. Use this for updating the device credential.
+ type: str
+ password:
+ description:
+ - cli_credential credential Password.
+ - Required for creating/updating the credential.
+ - Password cannot contain spaces or angle brackets (< >).
+ type: str
+ username:
+ description:
+ - cli_credential credential Username.
+ - Username cannot contain spaces or angle brackets (< >).
+ type: str
+ old_description:
+ description: Old Description. Use this for updating the description/Username.
+ type: str
+ old_username:
+ description: Old Username. Use this for updating the description/Username.
+ type: str
+ https_read:
+ description: Global Credential V2's httpsRead.
+ type: list
+ elements: dict
+ suboptions:
+ id:
+ description: Credential Id. Use this for updating the device credential.
+ type: str
+ name:
+ description: Name. Required for creating the credential.
+ type: str
+ password:
+ description:
+ - https_read credential Password.
+ - Required for creating/updating the credential.
+ - Password cannot contain spaces or angle brackets (< >).
+ type: str
+ port:
+ description: Port. Default port is 443.
+ type: int
+ username:
+ description:
+ - https_read credential Username.
+ - Username cannot contain spaces or angle brackets (< >).
+ type: str
+ old_description:
+ description: Old Description. Use this for updating the description/Username.
+ type: str
+ old_username:
+ description: Old Username. Use this for updating the description/Username.
+ type: str
+ https_write:
+ description: Global Credential V2's httpsWrite.
+ type: list
+ elements: dict
+ suboptions:
+ id:
+ description: Credential Id. Use this for updating the device credential.
+ type: str
+ name:
+ description: Name. Required for creating the credential.
+ type: str
+ password:
+ description:
+ - https_write credential Password.
+ - Required for creating/updating the credential.
+ - Password cannot contain spaces or angle brackets (< >).
+ type: str
+ port:
+ description: Port. Default port is 443.
+ type: int
+ username:
+ description:
+ - https_write credential Username.
+ - Username cannot contain spaces or angle brackets (< >).
+ type: str
+ old_description:
+ description: Old Description. Use this for updating the description/Username.
+ type: str
+ old_username:
+ description: Old Username. Use this for updating the description/Username.
+ type: str
+ snmp_v2c_read:
+ description: Global Credential V2's snmpV2cRead.
+ type: list
+ elements: dict
+ suboptions:
+ description:
+ description: Description. Required for creating the credential.
+ type: str
+ id:
+ description: Credential Id. Use this for updating the device credential.
+ type: str
+ read_community:
+ description:
+ - snmp_v2c_read Read Community.
+ - Password cannot contain spaces or angle brackets (< >).
+ type: str
+ old_description:
+ description: Old Description. Use this for updating the description.
+ type: str
+ snmp_v2c_write:
+ description: Global Credential V2's snmpV2cWrite.
+ type: list
+ elements: dict
+ suboptions:
+ description:
+ description: Description. Required for creating the credential.
+ type: str
+ id:
+ description: Credential Id. Use this for updating the device credential.
+ type: str
+ write_community:
+ description:
+ - snmp_v2c_write Write Community.
+ - Password cannot contain spaces or angle brackets (< >).
+ type: str
+ old_description:
+ description: Old Description. Use this for updating the description.
+ type: str
+ snmp_v3:
+ description: Global Credential V2's snmpV3.
+ type: list
+ elements: dict
+ suboptions:
+ auth_password:
+ description:
+ - snmp_v3 Auth Password.
+ - Password must contain minimum 8 characters.
+ - Password cannot contain spaces or angle brackets (< >).
+ type: str
+ auth_type:
+ description: Auth Type. ["SHA", "MD5"].
+ type: str
+ description:
+ description:
+ - snmp_v3 Description.
+ - Should be unique from other snmp_v3 credentials.
+ type: str
+ id:
+ description: Credential Id. Use this for updating the device credential.
+ type: str
+ privacy_password:
+ description:
+ - snmp_v3 Privacy Password.
+ - Password must contain minimum 8 characters.
+ - Password cannot contain spaces or angle brackets (< >).
+ type: str
+ privacy_type:
+ description: Privacy Type. ["AES128", "AES192", "AES256"].
+ type: str
+ snmp_mode:
+ description: Snmp Mode. ["AUTHPRIV", "AUTHNOPRIV", "NOAUTHNOPRIV"].
+ type: str
+ username:
+ description:
+ - snmp_v3 credential Username.
+ - Username cannot contain spaces or angle brackets (< >).
+ type: str
+ old_description:
+ description: Old Description. Use this for updating the description.
+ type: str
+ assign_credentials_to_site:
+ description: Assign Device Credentials to Site.
+ type: dict
+ suboptions:
+ cli_credential:
+ description: CLI Credential.
+ type: dict
+ suboptions:
+ description:
+ description: CLI Credential Description.
+ type: str
+ username:
+ description: CLI Credential Username.
+ type: str
+ id:
+ description: CLI Credential Id. Use (Description, Username) or Id.
+ type: str
+ https_read:
+ description: HTTP(S) Read Credential
+ type: dict
+ suboptions:
+ description:
+ description: HTTP(S) Read Credential Description.
+ type: str
+ username:
+ description: HTTP(S) Read Credential Username.
+ type: str
+ id:
+ description: HTTP(S) Read Credential Id. Use (Description, Username) or Id.
+ type: str
+ https_write:
+ description: HTTP(S) Write Credential
+ type: dict
+ suboptions:
+ description:
+ description: HTTP(S) Write Credential Description.
+ type: str
+ username:
+ description: HTTP(S) Write Credential Username.
+ type: str
+ id:
+ description: HTTP(S) Write Credential Id. Use (Description, Username) or Id.
+ type: str
+ site_name:
+ description: Site Name to assign credential.
+ type: list
+ elements: str
+ snmp_v2c_read:
+ description: SNMPv2c Read Credential
+ type: dict
+ suboptions:
+ description:
+ description: SNMPv2c Read Credential Description.
+ type: str
+ id:
+ description: SNMPv2c Read Credential Id. Use Description or Id.
+ type: str
+ snmp_v2c_write:
+ description: SNMPv2c Write Credential
+ type: dict
+ suboptions:
+ description:
+ description: SNMPv2c Write Credential Description.
+ type: str
+ id:
+ description: SNMPv2c Write Credential Id. Use Description or Id.
+ type: str
+ snmp_v3:
+ description: snmp_v3 Credential
+ type: dict
+ suboptions:
+ description:
+ description: snmp_v3 Credential Description.
+ type: str
+ id:
+ description: snmp_v3 Credential Id. Use Description or Id.
+ type: str
+requirements:
+- dnacentersdk >= 2.5.5
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Discovery CreateGlobalCredentialsV2
+ description: Complete reference of the CreateGlobalCredentialsV2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!create-global-credentials-v-2
+- name: Cisco DNA Center documentation for Discovery DeleteGlobalCredentialV2
+ description: Complete reference of the DeleteGlobalCredentialV2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-global-credential-v-2
+- name: Cisco DNA Center documentation for Discovery UpdateGlobalCredentialsV2
+ description: Complete reference of the UpdateGlobalCredentialsV2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-global-credentials-v-2
+- name: Cisco DNA Center documentation for Network Settings AssignDeviceCredentialToSiteV2
+ description: Complete reference of the AssignDeviceCredentialToSiteV2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!assign-device-credential-to-site-v-2
+notes:
+ - SDK Method used are
+ discovery.Discovery.create_global_credentials_v2,
+ discovery.Discovery.delete_global_credential_v2,
+ discovery.Discovery.update_global_credentials_v2,
+ network_settings.NetworkSettings.assign_device_credential_to_site_v2,
+
+ - Paths used are
+ post /dna/intent/api/v2/global-credential,
+ delete /dna/intent/api/v2/global-credential/{id},
+ put /dna/intent/api/v2/global-credential,
+ post /dna/intent/api/v2/credential-to-site/{siteId},
+"""
+
+EXAMPLES = r"""
+---
+ - name: Create Credentials and assign it to a site.
+ cisco.dnac.device_credential_intent:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - global_credential_details:
+ cli_credential:
+ - description: string
+ username: string
+ password: string
+ enable_password: string
+ snmp_v2c_read:
+ - description: string
+ read_community: string
+ snmp_v2c_write:
+ - description: string
+ write_community: string
+ snmp_v3:
+ - auth_password: string
+ auth_type: SHA
+ snmp_mode: AUTHPRIV
+ privacy_password: string
+ privacy_type: AES128
+ username: string
+ description: string
+ https_read:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ https_write:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ assign_credentials_to_site:
+ cli_credential:
+ id: string
+ snmp_v2c_read:
+ id: string
+ snmp_v2c_write:
+ id: string
+ snmp_v3:
+ id: string
+ https_read:
+ id: string
+ https_write:
+ id: string
+ site_name:
+ - string
+
+ - name: Create Multiple Credentials.
+ cisco.dnac.device_credential_intent:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - global_credential_details:
+ cli_credential:
+ - description: string
+ username: string
+ password: string
+ enable_password: string
+ - description: string
+ username: string
+ password: string
+ enable_password: string
+ snmp_v2c_read:
+ - description: string
+ read_community: string
+ - description: string
+ read_community: string
+ snmp_v2c_write:
+ - description: string
+ write_community: string
+ - description: string
+ write_community: string
+ snmp_v3:
+ - auth_password: string
+ auth_type: SHA
+ snmp_mode: AUTHPRIV
+ privacy_password: string
+ privacy_type: AES128
+ username: string
+ description: string
+ - auth_password: string
+ auth_type: SHA
+ snmp_mode: AUTHPRIV
+ privacy_password: string
+ privacy_type: AES128
+ username: string
+ description: string
+ https_read:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ - description: string
+ username: string
+ password: string
+ port: 443
+ https_write:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ - description: string
+ username: string
+ password: string
+ port: 443
+
+ - name: Update global device credentials using id
+ cisco.dnac.device_credential_intent:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - global_credential_details:
+ cli_credential:
+ - description: string
+ username: string
+ password: string
+ enable_password: string
+ id: string
+ snmp_v2c_read:
+ - description: string
+ read_community: string
+ id: string
+ snmp_v2c_write:
+ - description: string
+ write_community: string
+ id: string
+ snmp_v3:
+ - auth_password: string
+ auth_type: SHA
+ snmp_mode: AUTHPRIV
+ privacy_password: string
+ privacy_type: AES128
+ username: string
+ description: string
+ id: string
+ https_read:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ id: string
+ https_write:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ id: string
+
+ - name: Update multiple global device credentials using id
+ cisco.dnac.device_credential_intent:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - global_credential_details:
+ cli_credential:
+ - description: string
+ username: string
+ password: string
+ enable_password: string
+ id: string
+ - description: string
+ username: string
+ password: string
+ enable_password: string
+ id: string
+ snmp_v2c_read:
+ - description: string
+ read_community: string
+ id: string
+ - description: string
+ read_community: string
+ id: string
+ snmp_v2c_write:
+ - description: string
+ write_community: string
+ id: string
+ - description: string
+ write_community: string
+ id: string
+ snmp_v3:
+ - auth_password: string
+ auth_type: SHA
+ snmp_mode: AUTHPRIV
+ privacy_password: string
+ privacy_type: AES128
+ username: string
+ description: string
+ id: string
+ - auth_password: string
+ auth_type: SHA
+ snmp_mode: AUTHPRIV
+ privacy_password: string
+ privacy_type: AES128
+ username: string
+ description: string
+ id: string
+ https_read:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ id: string
+ - description: string
+ username: string
+ password: string
+ port: 443
+ id: string
+ https_write:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ id: string
+ - description: string
+ username: string
+ password: string
+ port: 443
+ id: string
+
+ - name: Update global device credential name/description using old name and description.
+ cisco.dnac.device_credential_intent:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - global_credential_details:
+ cli_credential:
+ - description: string
+ username: string
+ password: string
+ enable_password: string
+ old_description: string
+ old_username: string
+ snmp_v2c_read:
+ - description: string
+ read_community: string
+ old_description: string
+ snmp_v2c_write:
+ - description: string
+ write_community: string
+ old_description: string
+ snmp_v3:
+ - auth_password: string
+ auth_type: string
+ snmp_mode: string
+ privacy_password: string
+ privacy_type: string
+ username: string
+ description: string
+ https_read:
+ - description: string
+ username: string
+ password: string
+ port: string
+ old_description: string
+ old_username: string
+ https_write:
+ - description: string
+ username: string
+ password: string
+ port: string
+ old_description: string
+ old_username: string
+
+ - name: Assign Credentials to sites using old description and username.
+ cisco.dnac.device_credential_intent:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - assign_credentials_to_site:
+ cli_credential:
+ description: string
+ username: string
+ snmp_v2c_read:
+ description: string
+ snmp_v2c_write:
+ description: string
+ snmp_v3:
+ description: string
+ https_read:
+ description: string
+ username: string
+ https_write:
+ description: string
+ username: string
+ site_name:
+ - string
+ - string
+
+"""
+
+RETURN = r"""
+# Case_1: Successful creation/update/deletion of global device credentials
+dnac_response1:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+
+# Case_2: Successful assignment of global device credentials to a site.
+dnac_response2:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
+
+import copy
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts,
+ get_dict_result,
+)
+
+
+class DnacCredential(DnacBase):
+ """Class containing member attributes for device credential intent module"""
+
    def __init__(self, module):
        """Initialize the credential-intent module state.

        Delegates module setup to DnacBase and pre-seeds self.result["response"]
        with empty "globalCredential" / "assignCredential" containers that the
        intent methods fill in later.
        """
        super().__init__(module)
        # Response skeleton: one entry holding both operation results.
        self.result["response"] = [
            {
                "globalCredential": {},
                "assignCredential": {}
            }
        ]
+
+ def validate_input(self):
+ """
+ Validate the fields provided in the playbook.
+ Checks the configuration provided in the playbook against a predefined specification
+ to ensure it adheres to the expected structure and data types.
+ Parameters:
+ self: The instance of the class containing the 'config' attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call 'validate_input' on it.
+ If the validation succeeds, 'self.status' will be 'success' and 'self.validated_config'
+ will contain the validated configuration. If it fails, 'self.status' will be 'failed',
+ 'self.msg' will describe the validation issues.
+
+ """
+
+ if not self.config:
+ self.msg = "config not available in playbook for validation"
+ self.status = "success"
+ return self
+
+ # temp_spec is the specification for the expected structure of configuration parameters
+ temp_spec = {
+ "global_credential_details": {
+ "type": 'dict',
+ "cli_credential": {
+ "type": 'list',
+ "description": {"type": 'string'},
+ "username": {"type": 'string'},
+ "password": {"type": 'string'},
+ "enable_password": {"type": 'string'},
+ "old_description": {"type": 'string'},
+ "old_username": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "snmp_v2c_read": {
+ "type": 'list',
+ "description": {"type": 'string'},
+ "read_community": {"type": 'string'},
+ "old_description": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "snmp_v2c_write": {
+ "type": 'list',
+ "description": {"type": 'string'},
+ "write_community": {"type": 'string'},
+ "old_description": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "snmp_v3": {
+ "type": 'list',
+ "description": {"type": 'string'},
+ "username": {"type": 'string'},
+ "snmp_mode": {"type": 'string'},
+ "auth_type": {"type": 'string'},
+ "auth_password": {"type": 'string'},
+ "privacy_type": {"type": 'string'},
+ "privacy_password": {"type": 'string'},
+ "old_description": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "https_read": {
+ "type": 'list',
+ "description": {"type": 'string'},
+ "username": {"type": 'string'},
+ "password": {"type": 'string'},
+ "port": {"type": 'integer'},
+ "old_description": {"type": 'string'},
+ "old_username": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "https_write": {
+ "type": 'list',
+ "description": {"type": 'string'},
+ "username": {"type": 'string'},
+ "password": {"type": 'string'},
+ "port": {"type": 'integer'},
+ "old_description": {"type": 'string'},
+ "old_username": {"type": 'string'},
+ "id": {"type": 'string'},
+ }
+ },
+ "assign_credentials_to_site": {
+ "type": 'dict',
+ "cli_credential": {
+ "type": 'dict',
+ "description": {"type: 'string'"},
+ "username": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "snmp_v2c_read": {
+ "type": 'dict',
+ "description": {"type: 'string'"},
+ "username": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "snmp_v2c_write": {
+ "type": 'dict',
+ "description": {"type: 'string'"},
+ "id": {"type": 'string'},
+ },
+ "snmp_v3": {
+ "type": 'dict',
+ "description": {"type: 'string'"},
+ "id": {"type": 'string'},
+ },
+ "https_read": {
+ "type": 'dict',
+ "description": {"type: 'string'"},
+ "username": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "https_write": {
+ "type": 'dict',
+ "description": {"type: 'string'"},
+ "username": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "site_name": {
+ "type": 'list',
+ "elements": 'string'
+ }
+ }
+ }
+
+ # Validate playbook params against the specification (temp_spec)
+ self.config = self.camel_to_snake_case(self.config)
+ valid_temp, invalid_params = validate_list_of_dicts(self.config, temp_spec)
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format("\n".join(invalid_params))
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_temp
+ self.log("Successfully validated playbook config params: {0}".format(valid_temp), "INFO")
+ self.msg = "Successfully validated input from the playbook"
+ self.status = "success"
+ return self
+
+ def get_site_id(self, site_name):
+ """
+ Get the site id from the site name.
+ Use check_return_status() to check for failure
+
+ Parameters:
+ site_name (str) - Site name
+
+ Returns:
+ str or None - The Site Id if found, or None if not found or error
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="sites",
+ function='get_site',
+ params={"name": site_name},
+ )
+ self.log("Received API response from 'get_site': {0}".format(response), "DEBUG")
+ if not response:
+ self.log("Failed to retrieve the site ID for the site name: {0}"
+ .format(site_name), "ERROR")
+ return None
+
+ _id = response.get("response")[0].get("id")
+ self.log("Site ID for the site name {0}: {1}".format(site_name, _id), "INFO")
+ except Exception as exec:
+ self.log("Exception occurred while getting site_id from the site_name: {0}"
+ .format(exec), "CRITICAL")
+ return None
+
+ return _id
+
+ def get_global_credentials_params(self):
+ """
+ Get the current Global Device Credentials from Cisco DNA Center.
+
+ Parameters:
+ self - The current object details.
+
+ Returns:
+ global_credentials (dict) - All global device credentials details.
+ """
+
+ try:
+ global_credentials = self.dnac._exec(
+ family="discovery",
+ function='get_all_global_credentials_v2',
+ )
+ global_credentials = global_credentials.get("response")
+ self.log("All global device credentials details: {0}"
+ .format(global_credentials), "DEBUG")
+ except Exception as exec:
+ self.log("Exception occurred while getting global device credentials: {0}"
+ .format(exec), "CRITICAL")
+ return None
+
+ return global_credentials
+
+ def get_cli_params(self, cliDetails):
+ """
+ Format the CLI parameters for the CLI credential configuration in Cisco DNA Center.
+
+ Parameters:
+ cliDetails (list of dict) - Cisco DNA Center details containing CLI Credentials.
+
+ Returns:
+ cliCredential (list of dict) - Processed CLI credential data
+ in the format suitable for the Cisco DNA Center config.
+ """
+
+ cliCredential = []
+ for item in cliDetails:
+ if item is None:
+ cliCredential.append(None)
+ else:
+ value = {
+ "username": item.get("username"),
+ "description": item.get("description"),
+ "id": item.get("id")
+ }
+ cliCredential.append(value)
+ return cliCredential
+
+ def get_snmpV2cRead_params(self, snmpV2cReadDetails):
+ """
+ Format the snmpV2cRead parameters for the snmpV2cRead
+ credential configuration in Cisco DNA Center.
+
+ Parameters:
+ snmpV2cReadDetails (list of dict) - Cisco DNA Center
+ Details containing snmpV2cRead Credentials.
+
+ Returns:
+ snmpV2cRead (list of dict) - Processed snmpV2cRead credential
+ data in the format suitable for the Cisco DNA Center config.
+ """
+
+ snmpV2cRead = []
+ for item in snmpV2cReadDetails:
+ if item is None:
+ snmpV2cRead.append(None)
+ else:
+ value = {
+ "description": item.get("description"),
+ "id": item.get("id")
+ }
+ snmpV2cRead.append(value)
+ return snmpV2cRead
+
+ def get_snmpV2cWrite_params(self, snmpV2cWriteDetails):
+ """
+ Format the snmpV2cWrite parameters for the snmpV2cWrite
+ credential configuration in Cisco DNA Center.
+
+ Parameters:
+ snmpV2cWriteDetails (list of dict) - Cisco DNA Center
+ Details containing snmpV2cWrite Credentials.
+
+ Returns:
+ snmpV2cWrite (list of dict) - Processed snmpV2cWrite credential
+ data in the format suitable for the Cisco DNA Center config.
+ """
+
+ snmpV2cWrite = []
+ for item in snmpV2cWriteDetails:
+ if item is None:
+ snmpV2cWrite.append(None)
+ else:
+ value = {
+ "description": item.get("description"),
+ "id": item.get("id")
+ }
+ snmpV2cWrite.append(value)
+ return snmpV2cWrite
+
+ def get_httpsRead_params(self, httpsReadDetails):
+ """
+ Format the httpsRead parameters for the httpsRead
+ credential configuration in Cisco DNA Center.
+
+ Parameters:
+ httpsReadDetails (list of dict) - Cisco DNA Center
+ Details containing httpsRead Credentials.
+
+ Returns:
+ httpsRead (list of dict) - Processed httpsRead credential
+ data in the format suitable for the Cisco DNA Center config.
+ """
+
+ httpsRead = []
+ for item in httpsReadDetails:
+ if item is None:
+ httpsRead.append(None)
+ else:
+ value = {
+ "description": item.get("description"),
+ "username": item.get("username"),
+ "port": item.get("port"),
+ "id": item.get("id")
+ }
+ httpsRead.append(value)
+ return httpsRead
+
+ def get_httpsWrite_params(self, httpsWriteDetails):
+ """
+ Format the httpsWrite parameters for the httpsWrite
+ credential configuration in Cisco DNA Center.
+
+ Parameters:
+ httpsWriteDetails (list of dict) - Cisco DNA Center
+ Details containing httpsWrite Credentials.
+
+ Returns:
+ httpsWrite (list of dict) - Processed httpsWrite credential
+ data in the format suitable for the Cisco DNA Center config.
+ """
+
+ httpsWrite = []
+ for item in httpsWriteDetails:
+ if item is None:
+ httpsWrite.append(None)
+ else:
+ value = {
+ "description": item.get("description"),
+ "username": item.get("username"),
+ "port": item.get("port"),
+ "id": item.get("id")
+ }
+ httpsWrite.append(value)
+ return httpsWrite
+
+ def get_snmpV3_params(self, snmpV3Details):
+ """
+ Format the snmpV3 parameters for the snmpV3 credential configuration in Cisco DNA Center.
+
+ Parameters:
+ snmpV3Details (list of dict) - Cisco DNA Center details containing snmpV3 Credentials.
+
+ Returns:
+ snmpV3 (list of dict) - Processed snmpV3 credential
+ data in the format suitable for the Cisco DNA Center config.
+ """
+
+ snmpV3 = []
+ for item in snmpV3Details:
+ if item is None:
+ snmpV3.append(None)
+ else:
+ value = {
+ "username": item.get("username"),
+ "description": item.get("description"),
+ "snmpMode": item.get("snmpMode"),
+ "id": item.get("id"),
+ }
+ if value.get("snmpMode") == "AUTHNOPRIV":
+ value["authType"] = item.get("authType")
+ elif value.get("snmpMode") == "AUTHPRIV":
+ value.update({
+ "authType": item.get("authType"),
+ "privacyType": item.get("privacyType")
+ })
+ snmpV3.append(value)
+ return snmpV3
+
    def get_cli_credentials(self, CredentialDetails, global_credentials):
        """
        Get the current CLI Credential from
        Cisco DNA Center based on the provided playbook details.
        Check this API using the check_return_status.

        Lookup priority per playbook entry: (1) id, (2) the
        (old_description, old_username) pair, (3) the (description, username)
        pair. A miss in stage 3 is not an error (the credential may be about
        to be created); misses or ambiguous matches in stages 1-2 fail.

        Parameters:
            CredentialDetails (dict) - Playbook details containing Global Device Credentials.
            global_credentials (dict) - All global device credentials details.

        Returns:
            cliDetails (list) - The matched CLI credentials, with None
            placeholders for unmatched entries.
            NOTE(review): on a failed lookup this returns `self` (with
            self.status == "failed") instead of a list; callers are expected
            to validate via check_return_status().
        """

        # playbook CLI Credential details
        all_CLI = CredentialDetails.get("cli_credential")
        # All CLI details from Cisco DNA Center
        cli_details = global_credentials.get("cliCredential")
        # Cisco DNA Center details for the CLI Credential given in the playbook
        cliDetails = []
        if all_CLI and cli_details:
            for cliCredential in all_CLI:
                cliDetail = None
                # Stage 1: an explicit id must resolve to an existing credential.
                cliId = cliCredential.get("id")
                if cliId:
                    cliDetail = get_dict_result(cli_details, "id", cliId)
                    if not cliDetail:
                        self.msg = "CLI credential ID is invalid"
                        self.status = "failed"
                        return self

                # Stage 2: (old_description, old_username) must match exactly one.
                cliOldDescription = cliCredential.get("old_description")
                cliOldUsername = cliCredential.get("old_username")
                if cliOldDescription and cliOldUsername and (not cliDetail):
                    for item in cli_details:
                        if item.get("description") == cliOldDescription \
                                and item.get("username") == cliOldUsername:
                            if cliDetail:
                                # NOTE(review): the backslash continuation embeds the
                                # next line's leading whitespace in the message text.
                                self.msg = "More than one CLI credential with same \
                                old_description and old_username. Pass ID."
                                self.status = "failed"
                                return self
                            cliDetail = item
                    if not cliDetail:
                        self.msg = "CLI credential old_description or old_username is invalid"
                        self.status = "failed"
                        return self

                # Stage 3: (description, username); duplicates fail, a miss does not.
                cliDescription = cliCredential.get("description")
                cliUsername = cliCredential.get("username")
                if cliDescription and cliUsername and (not cliDetail):
                    for item in cli_details:
                        if item.get("description") == cliDescription \
                                and item.get("username") == cliUsername:
                            if cliDetail:
                                self.msg = "More than one CLI Credential with same \
                                description and username. Pass ID."
                                self.status = "failed"
                                return self
                            cliDetail = item
                cliDetails.append(cliDetail)
        return cliDetails
+
    def get_snmpV2cRead_credentials(self, CredentialDetails, global_credentials):
        """
        Get the current snmpV2cRead Credential from
        Cisco DNA Center based on the provided playbook details.
        Check this API using the check_return_status.

        Lookup priority per playbook entry: (1) id, (2) old_description,
        (3) description. A miss in stage 3 is not an error (the credential
        may be about to be created); misses in stages 1-2 fail.

        Parameters:
            CredentialDetails (dict) - Playbook details containing Global Device Credentials.
            global_credentials (dict) - All global device credentials details.

        Returns:
            snmpV2cReadDetails (list) - The matched snmpV2cRead credentials,
            with None placeholders for unmatched entries.
            NOTE(review): on a failed lookup this returns `self` (with
            self.status == "failed") instead of a list; callers validate
            via check_return_status().
        """

        # Playbook snmpV2cRead Credential details
        all_snmpV2cRead = CredentialDetails.get("snmp_v2c_read")
        # All snmpV2cRead details from the Cisco DNA Center
        snmpV2cRead_details = global_credentials.get("snmpV2cRead")
        # Cisco DNA Center details for the snmpV2cRead Credential given in the playbook
        snmpV2cReadDetails = []
        if all_snmpV2cRead and snmpV2cRead_details:
            for snmpV2cReadCredential in all_snmpV2cRead:
                snmpV2cReadDetail = None
                # Stage 1: an explicit id must resolve to an existing credential.
                snmpV2cReadId = snmpV2cReadCredential.get("id")
                if snmpV2cReadId:
                    snmpV2cReadDetail = get_dict_result(snmpV2cRead_details, "id", snmpV2cReadId)
                    if not snmpV2cReadDetail:
                        self.msg = "snmpV2cRead credential ID is invalid"
                        self.status = "failed"
                        return self

                # Stage 2: old_description must resolve when provided.
                snmpV2cReadOldDescription = snmpV2cReadCredential.get("old_description")
                if snmpV2cReadOldDescription and (not snmpV2cReadDetail):
                    snmpV2cReadDetail = get_dict_result(
                        snmpV2cRead_details,
                        "description",
                        snmpV2cReadOldDescription
                    )
                    if not snmpV2cReadDetail:
                        self.msg = "snmpV2cRead credential old_description is invalid"
                        self.status = "failed"
                        return self

                # Stage 3: description; a miss here is tolerated.
                snmpV2cReadDescription = snmpV2cReadCredential.get("description")
                if snmpV2cReadDescription and (not snmpV2cReadDetail):
                    snmpV2cReadDetail = get_dict_result(
                        snmpV2cRead_details,
                        "description",
                        snmpV2cReadDescription
                    )
                snmpV2cReadDetails.append(snmpV2cReadDetail)
        return snmpV2cReadDetails
+
    def get_snmpV2cWrite_credentials(self, CredentialDetails, global_credentials):
        """
        Get the current snmpV2cWrite Credential from
        Cisco DNA Center based on the provided playbook details.
        Check this API using the check_return_status.

        Lookup priority per playbook entry: (1) id, (2) old_description,
        (3) description. A miss in stage 3 is not an error (the credential
        may be about to be created); misses in stages 1-2 fail.

        Parameters:
            CredentialDetails (dict) - Playbook details containing Global Device Credentials.
            global_credentials (dict) - All global device credentials details.

        Returns:
            snmpV2cWriteDetails (list) - The matched snmpV2cWrite credentials,
            with None placeholders for unmatched entries.
            NOTE(review): on a failed lookup this returns `self` (with
            self.status == "failed") instead of a list; callers validate
            via check_return_status().
        """

        # Playbook snmpV2cWrite Credential details
        all_snmpV2cWrite = CredentialDetails.get("snmp_v2c_write")
        # All snmpV2cWrite details from the Cisco DNA Center
        snmpV2cWrite_details = global_credentials.get("snmpV2cWrite")
        # Cisco DNA Center details for the snmpV2cWrite Credential given in the playbook
        snmpV2cWriteDetails = []
        if all_snmpV2cWrite and snmpV2cWrite_details:
            for snmpV2cWriteCredential in all_snmpV2cWrite:
                snmpV2cWriteDetail = None
                # Stage 1: an explicit id must resolve to an existing credential.
                snmpV2cWriteId = snmpV2cWriteCredential.get("id")
                if snmpV2cWriteId:
                    snmpV2cWriteDetail = get_dict_result(snmpV2cWrite_details, "id", snmpV2cWriteId)
                    if not snmpV2cWriteDetail:
                        self.msg = "snmpV2cWrite credential ID is invalid"
                        self.status = "failed"
                        return self

                # Stage 2: old_description must resolve when provided.
                snmpV2cWriteOldDescription = snmpV2cWriteCredential.get("old_description")
                if snmpV2cWriteOldDescription and (not snmpV2cWriteDetail):
                    snmpV2cWriteDetail = get_dict_result(
                        snmpV2cWrite_details,
                        "description",
                        snmpV2cWriteOldDescription
                    )
                    if not snmpV2cWriteDetail:
                        self.msg = "snmpV2cWrite credential old_description is invalid "
                        self.status = "failed"
                        return self

                # Stage 3: description; a miss here is tolerated.
                snmpV2cWriteDescription = snmpV2cWriteCredential.get("description")
                if snmpV2cWriteDescription and (not snmpV2cWriteDetail):
                    snmpV2cWriteDetail = get_dict_result(
                        snmpV2cWrite_details,
                        "description",
                        snmpV2cWriteDescription
                    )
                snmpV2cWriteDetails.append(snmpV2cWriteDetail)
        return snmpV2cWriteDetails
+
    def get_httpsRead_credentials(self, CredentialDetails, global_credentials):
        """
        Get the current httpsRead Credential from
        Cisco DNA Center based on the provided playbook details.
        Check this API using the check_return_status.

        Lookup priority per playbook entry: (1) id, (2) the
        (old_description, old_username) pair, (3) the (description, username)
        pair. A miss in stage 3 is not an error (the credential may be about
        to be created); misses or ambiguous matches in stages 1-2 fail.

        Parameters:
            CredentialDetails (dict) - Playbook details containing Global Device Credentials.
            global_credentials (dict) - All global device credentials details.

        Returns:
            httpsReadDetails (list) - The matched httpsRead credentials, with
            None placeholders for unmatched entries.
            NOTE(review): on a failed lookup this returns `self` (with
            self.status == "failed") instead of a list; callers validate
            via check_return_status().
        """

        # Playbook httpsRead Credential details
        all_httpsRead = CredentialDetails.get("https_read")
        # All httpsRead details from the Cisco DNA Center
        httpsRead_details = global_credentials.get("httpsRead")
        # Cisco DNA Center details for the httpsRead Credential given in the playbook
        httpsReadDetails = []
        if all_httpsRead and httpsRead_details:
            for httpsReadCredential in all_httpsRead:
                httpsReadDetail = None
                # Stage 1: an explicit id must resolve to an existing credential.
                httpsReadId = httpsReadCredential.get("id")
                if httpsReadId:
                    httpsReadDetail = get_dict_result(httpsRead_details, "id", httpsReadId)
                    if not httpsReadDetail:
                        self.msg = "httpsRead credential Id is invalid"
                        self.status = "failed"
                        return self

                # Stage 2: (old_description, old_username) must match exactly one.
                httpsReadOldDescription = httpsReadCredential.get("old_description")
                httpsReadOldUsername = httpsReadCredential.get("old_username")
                if httpsReadOldDescription and httpsReadOldUsername and (not httpsReadDetail):
                    for item in httpsRead_details:
                        if item.get("description") == httpsReadOldDescription \
                                and item.get("username") == httpsReadOldUsername:
                            if httpsReadDetail:
                                # NOTE(review): the backslash continuation embeds the
                                # next line's leading whitespace in the message text.
                                self.msg = "More than one httpsRead credential with same \
                                old_description and old_username. Pass ID."
                                self.status = "failed"
                                return self
                            httpsReadDetail = item
                    if not httpsReadDetail:
                        self.msg = "httpsRead credential old_description or old_username is invalid"
                        self.status = "failed"
                        return self

                # Stage 3: (description, username); duplicates fail, a miss does not.
                httpsReadDescription = httpsReadCredential.get("description")
                httpsReadUsername = httpsReadCredential.get("username")
                if httpsReadDescription and httpsReadUsername and (not httpsReadDetail):
                    for item in httpsRead_details:
                        if item.get("description") == httpsReadDescription \
                                and item.get("username") == httpsReadUsername:
                            if httpsReadDetail:
                                self.msg = "More than one httpsRead credential with same \
                                description and username. Pass ID."
                                self.status = "failed"
                                return self
                            httpsReadDetail = item
                httpsReadDetails.append(httpsReadDetail)
        return httpsReadDetails
+
+ def get_httpsWrite_credentials(self, CredentialDetails, global_credentials):
+ """
+ Get the current httpsWrite Credential from
+ Cisco DNA Center based on the provided playbook details.
+ Check this API using the check_return_status.
+
+ Parameters:
+ CredentialDetails (dict) - Playbook details containing Global Device Credentials.
+ global_credentials (dict) - All global device credentials details.
+
+ Returns:
+ httpsWriteDetails (List) - The current httpsWrite.
+ """
+
+ # Playbook httpsWrite Credential details
+ all_httpsWrite = CredentialDetails.get("https_write")
+ # All httpsWrite details from the Cisco DNA Center
+ httpsWrite_details = global_credentials.get("httpsWrite")
+ # Cisco DNA Center details for the httpsWrite Credential given in the playbook
+ httpsWriteDetails = []
+ if all_httpsWrite and httpsWrite_details:
+ for httpsWriteCredential in all_httpsWrite:
+ httpsWriteDetail = None
+ httpsWriteId = httpsWriteCredential.get("id")
+ if httpsWriteId:
+ httpsWriteDetail = get_dict_result(httpsWrite_details, "id", httpsWriteId)
+ if not httpsWriteDetail:
+ self.msg = "httpsWrite credential Id is invalid"
+ self.status = "failed"
+ return self
+
+ httpsWriteOldDescription = httpsWriteCredential.get("old_description")
+ httpsWriteOldUsername = httpsWriteCredential.get("old_username")
+ if httpsWriteOldDescription and httpsWriteOldUsername and (not httpsWriteDetail):
+ for item in httpsWrite_details:
+ if item.get("description") == httpsWriteOldDescription \
+ and item.get("username") == httpsWriteOldUsername:
+ if httpsWriteDetail:
+ self.msg = "More than one httpsWrite credential with same \
+ old_description and old_username. Pass ID"
+ self.status = "failed"
+ return self
+ httpsWriteDetail = item
+ if not httpsWriteDetail:
+ self.msg = "httpsWrite credential old_description or \
+ old_username is invalid"
+ self.status = "failed"
+ return self
+
+ httpsWriteDescription = httpsWriteCredential.get("description")
+ httpsWriteUsername = httpsWriteCredential.get("username")
+ if httpsWriteDescription and httpsWriteUsername and (not httpsWriteDetail):
+ for item in httpsWrite_details:
+ if item.get("description") == httpsWriteDescription \
+ and item.get("username") == httpsWriteUsername:
+ httpsWriteDetail = item
+ httpsWriteDetails.append(httpsWriteDetail)
+ return httpsWriteDetails
+
    def get_snmpV3_credentials(self, CredentialDetails, global_credentials):
        """
        Get the current snmpV3 Credential from
        Cisco DNA Center based on the provided playbook details.
        Check this API using the check_return_status.

        Lookup priority per playbook entry: (1) id, (2) old_description,
        (3) description. A miss in stage 3 is not an error (the credential
        may be about to be created); misses in stages 1-2 fail.

        Parameters:
            CredentialDetails (dict) - Playbook details containing Global Device Credentials.
            global_credentials (dict) - All global device credentials details.

        Returns:
            snmpV3Details (list) - The matched snmpV3 credentials, with None
            placeholders for unmatched entries.
            NOTE(review): on a failed lookup this returns `self` (with
            self.status == "failed") instead of a list; callers validate
            via check_return_status().
        """

        # Playbook snmpV3 Credential details
        all_snmpV3 = CredentialDetails.get("snmp_v3")
        # All snmpV3 details from the Cisco DNA Center
        snmpV3_details = global_credentials.get("snmpV3")
        # Cisco DNA Center details for the snmpV3 Credential given in the playbook
        snmpV3Details = []
        if all_snmpV3 and snmpV3_details:
            for snmpV3Credential in all_snmpV3:
                snmpV3Detail = None
                # Stage 1: an explicit id must resolve to an existing credential.
                snmpV3Id = snmpV3Credential.get("id")
                if snmpV3Id:
                    snmpV3Detail = get_dict_result(snmpV3_details, "id", snmpV3Id)
                    if not snmpV3Detail:
                        self.msg = "snmpV3 credential id is invalid"
                        self.status = "failed"
                        return self

                # Stage 2: old_description must resolve when provided.
                snmpV3OldDescription = snmpV3Credential.get("old_description")
                if snmpV3OldDescription and (not snmpV3Detail):
                    snmpV3Detail = get_dict_result(snmpV3_details,
                                                   "description", snmpV3OldDescription)
                    if not snmpV3Detail:
                        self.msg = "snmpV3 credential old_description is invalid"
                        self.status = "failed"
                        return self

                # Stage 3: description; a miss here is tolerated.
                snmpV3Description = snmpV3Credential.get("description")
                if snmpV3Description and (not snmpV3Detail):
                    snmpV3Detail = get_dict_result(snmpV3_details, "description", snmpV3Description)
                snmpV3Details.append(snmpV3Detail)
        return snmpV3Details
+
+ def get_have_device_credentials(self, CredentialDetails):
+ """
+ Get the current Global Device Credentials from
+ Cisco DNA Center based on the provided playbook details.
+ Check this API using the check_return_status.
+
+ Parameters:
+ CredentialDetails (dict) - Playbook details containing Global Device Credentials.
+
+ Returns:
+ self - The current object with updated information.
+ """
+
+ global_credentials = self.get_global_credentials_params()
+ cliDetails = self.get_cli_credentials(CredentialDetails, global_credentials)
+ snmpV2cReadDetails = self.get_snmpV2cRead_credentials(CredentialDetails, global_credentials)
+ snmpV2cWriteDetails = self.get_snmpV2cWrite_credentials(CredentialDetails,
+ global_credentials)
+ httpsReadDetails = self.get_httpsRead_credentials(CredentialDetails, global_credentials)
+ httpsWriteDetails = self.get_httpsWrite_credentials(CredentialDetails, global_credentials)
+ snmpV3Details = self.get_snmpV3_credentials(CredentialDetails, global_credentials)
+ self.have.update({"globalCredential": {}})
+ if cliDetails:
+ cliCredential = self.get_cli_params(cliDetails)
+ self.have.get("globalCredential").update({"cliCredential": cliCredential})
+ if snmpV2cReadDetails:
+ snmpV2cRead = self.get_snmpV2cRead_params(snmpV2cReadDetails)
+ self.have.get("globalCredential").update({"snmpV2cRead": snmpV2cRead})
+ if snmpV2cWriteDetails:
+ snmpV2cWrite = self.get_snmpV2cWrite_params(snmpV2cWriteDetails)
+ self.have.get("globalCredential").update({"snmpV2cWrite": snmpV2cWrite})
+ if httpsReadDetails:
+ httpsRead = self.get_httpsRead_params(httpsReadDetails)
+ self.have.get("globalCredential").update({"httpsRead": httpsRead})
+ if httpsWriteDetails:
+ httpsWrite = self.get_httpsWrite_params(httpsWriteDetails)
+ self.have.get("globalCredential").update({"httpsWrite": httpsWrite})
+ if snmpV3Details:
+ snmpV3 = self.get_snmpV3_params(snmpV3Details)
+ self.have.get("globalCredential").update({"snmpV3": snmpV3})
+
+ self.log("Global device credential details: {0}"
+ .format(self.have.get("globalCredential")), "DEBUG")
+ self.msg = "Collected the Global Device Credential Details from the Cisco DNA Center"
+ self.status = "success"
+ return self
+
+ def get_have(self, config):
+ """
+ Get the current Global Device Credentials and
+ Device Credentials assigned to a site in Cisco DNA Center.
+
+ Parameters:
+ config (dict) - Playbook details containing Global Device
+ Credentials configurations and Device Credentials should
+ be assigned to a site.
+
+ Returns:
+ self - The current object with updated information of Global
+ Device Credentials and Device Credentials assigned to a site.
+ """
+
+ if config.get("global_credential_details") is not None:
+ CredentialDetails = config.get("global_credential_details")
+ self.get_have_device_credentials(CredentialDetails).check_return_status()
+
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.msg = "Successfully retrieved the details from the Cisco DNA Center"
+ self.status = "success"
+ return self
+
+ def get_want_device_credentials(self, CredentialDetails):
+ """
+ Get the Global Device Credentials from the playbook.
+ Check this API using the check_return_status.
+
+ Parameters:
+ CredentialDetails (dict) - Playbook details containing Global Device Credentials.
+
+ Returns:
+ self - The current object with updated information of
+ Global Device Credentials from the playbook.
+ """
+
+ want = {
+ "want_create": {},
+ "want_update": {}
+ }
+ if CredentialDetails.get("cli_credential"):
+ cli = CredentialDetails.get("cli_credential")
+ have_cli_ptr = 0
+ create_cli_ptr = 0
+ update_cli_ptr = 0
+ values = ["password", "description", "username", "id"]
+ have_cliCredential = self.have.get("globalCredential").get("cliCredential")
+ for item in cli:
+ if not have_cliCredential or have_cliCredential[have_cli_ptr] is None:
+ if want.get("want_create").get("cliCredential") is None:
+ want.get("want_create").update({"cliCredential": []})
+ create_credential = want.get("want_create").get("cliCredential")
+ create_credential.append({})
+ for i in range(0, 3):
+ if item.get(values[i]):
+ create_credential[create_cli_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ self.msg = values[i] + " is mandatory for creating \
+ cliCredential " + str(have_cli_ptr)
+ self.status = "failed"
+ return self
+
+ if item.get("enable_password"):
+ create_credential[create_cli_ptr] \
+ .update({"enablePassword": item.get("enable_password")})
+ create_cli_ptr = create_cli_ptr + 1
+ else:
+ if want.get("want_update").get("cliCredential") is None:
+ want.get("want_update").update({"cliCredential": []})
+ update_credential = want.get("want_update").get("cliCredential")
+ update_credential.append({})
+ if item.get("password"):
+ update_credential[update_cli_ptr] \
+ .update({"password": item.get("password")})
+ else:
+ self.msg = "password is mandatory for udpating \
+ cliCredential " + str(have_cli_ptr)
+ self.status = "failed"
+ return self
+
+ for i in range(1, 4):
+ if item.get(values[i]):
+ update_credential[update_cli_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ update_credential[update_cli_ptr].update({
+ values[i]: self.have.get("globalCredential")
+ .get("cliCredential")[have_cli_ptr].get(values[i])
+ })
+
+ if item.get("enable_password"):
+ update_credential[update_cli_ptr].update({
+ "enablePassword": item.get("enable_password")
+ })
+ update_cli_ptr = update_cli_ptr + 1
+ have_cli_ptr = have_cli_ptr + 1
+
+ if CredentialDetails.get("snmp_v2c_read"):
+ snmpV2cRead = CredentialDetails.get("snmp_v2c_read")
+ have_snmpv2cread_ptr = 0
+ create_snmpv2cread_ptr = 0
+ update_snmpv2cread_ptr = 0
+ values = ["read_community", "description", "id"]
+ keys = ["readCommunity", "description", "id"]
+ have_snmpV2cRead = self.have.get("globalCredential").get("snmpV2cRead")
+ for item in snmpV2cRead:
+ if not have_snmpV2cRead or have_snmpV2cRead[have_snmpv2cread_ptr] is None:
+ if want.get("want_create").get("snmpV2cRead") is None:
+ want.get("want_create").update({"snmpV2cRead": []})
+ create_credential = want.get("want_create").get("snmpV2cRead")
+ create_credential.append({})
+ for i in range(0, 2):
+ if item.get(values[i]):
+ create_credential[create_snmpv2cread_ptr] \
+ .update({keys[i]: item.get(values[i])})
+ else:
+ self.msg = values[i] + " is mandatory for creating \
+ snmpV2cRead " + str(have_snmpv2cread_ptr)
+ self.status = "failed"
+ return self
+ create_snmpv2cread_ptr = create_snmpv2cread_ptr + 1
+ else:
+ if want.get("want_update").get("snmpV2cRead") is None:
+ want.get("want_update").update({"snmpV2cRead": []})
+ update_credential = want.get("want_update").get("snmpV2cRead")
+ update_credential.append({})
+ if item.get("read_community"):
+ update_credential[update_snmpv2cread_ptr] \
+ .update({"readCommunity": item.get("read_community")})
+ else:
+ self.msg = "read_community is mandatory for updating \
+ snmpV2cRead " + str(have_snmpv2cread_ptr)
+ self.status = "failed"
+ return self
+ for i in range(1, 3):
+ if item.get(values[i]):
+ update_credential[update_snmpv2cread_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ update_credential[update_snmpv2cread_ptr].update({
+ values[i]: self.have.get("globalCredential")
+ .get("snmpV2cRead")[have_snmpv2cread_ptr].get(values[i])
+ })
+ update_snmpv2cread_ptr = update_snmpv2cread_ptr + 1
+ have_snmpv2cread_ptr = have_snmpv2cread_ptr + 1
+
+ if CredentialDetails.get("snmp_v2c_write"):
+ snmpV2cWrite = CredentialDetails.get("snmp_v2c_write")
+ have_snmpv2cwrite_ptr = 0
+ create_snmpv2cwrite_ptr = 0
+ update_snmpv2cwrite_ptr = 0
+ values = ["write_community", "description", "id"]
+ keys = ["writeCommunity", "description", "id"]
+ have_snmpV2cWrite = self.have.get("globalCredential").get("snmpV2cWrite")
+ for item in snmpV2cWrite:
+ if not have_snmpV2cWrite or have_snmpV2cWrite[have_snmpv2cwrite_ptr] is None:
+ if want.get("want_create").get("snmpV2cWrite") is None:
+ want.get("want_create").update({"snmpV2cWrite": []})
+ create_credential = want.get("want_create").get("snmpV2cWrite")
+ create_credential.append({})
+ for i in range(0, 2):
+ if item.get(values[i]):
+ create_credential[create_snmpv2cwrite_ptr] \
+ .update({keys[i]: item.get(values[i])})
+ else:
+ self.msg = values[i] + " is mandatory for creating \
+ snmpV2cWrite " + str(have_snmpv2cwrite_ptr)
+ self.status = "failed"
+ return self
+ create_snmpv2cwrite_ptr = create_snmpv2cwrite_ptr + 1
+ else:
+ if want.get("want_update").get("snmpV2cWrite") is None:
+ want.get("want_update").update({"snmpV2cWrite": []})
+ update_credential = want.get("want_update").get("snmpV2cWrite")
+ update_credential.append({})
+ if item.get("write_community"):
+ update_credential[update_snmpv2cwrite_ptr] \
+ .update({"writeCommunity": item.get("write_community")})
+ else:
+ self.msg = "write_community is mandatory for updating \
+ snmpV2cWrite " + str(have_snmpv2cwrite_ptr)
+ self.status = "failed"
+ return self
+ for i in range(1, 3):
+ if item.get(values[i]):
+ update_credential[update_snmpv2cwrite_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ update_credential[update_snmpv2cwrite_ptr].update({
+ values[i]: self.have.get("globalCredential")
+ .get("snmpV2cWrite")[have_snmpv2cwrite_ptr].get(values[i])
+ })
+ update_snmpv2cwrite_ptr = update_snmpv2cwrite_ptr + 1
+ have_snmpv2cwrite_ptr = have_snmpv2cwrite_ptr + 1
+
+ if CredentialDetails.get("https_read"):
+ httpsRead = CredentialDetails.get("https_read")
+ have_httpsread_ptr = 0
+ create_httpsread_ptr = 0
+ update_httpsread_ptr = 0
+ values = ["password", "description", "username", "id", "port"]
+ have_httpsRead = self.have.get("globalCredential").get("httpsRead")
+ for item in httpsRead:
+ self.log("Global credentials details: {0}"
+ .format(self.have.get("globalCredential")), "DEBUG")
+ if not have_httpsRead or have_httpsRead[have_httpsread_ptr] is None:
+ if want.get("want_create").get("httpsRead") is None:
+ want.get("want_create").update({"httpsRead": []})
+ create_credential = want.get("want_create").get("httpsRead")
+ create_credential.append({})
+ for i in range(0, 3):
+ if item.get(values[i]):
+ create_credential[create_httpsread_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ self.msg = values[i] + " is mandatory for creating \
+ httpsRead " + str(have_httpsread_ptr)
+ self.status = "failed"
+ return self
+ if item.get("port"):
+ create_credential[create_httpsread_ptr] \
+ .update({"port": item.get("port")})
+ else:
+ create_credential[create_httpsread_ptr] \
+ .update({"port": "443"})
+ create_httpsread_ptr = create_httpsread_ptr + 1
+ else:
+ if want.get("want_update").get("httpsRead") is None:
+ want.get("want_update").update({"httpsRead": []})
+ update_credential = want.get("want_update").get("httpsRead")
+ update_credential.append({})
+ if item.get("password"):
+ update_credential[update_httpsread_ptr] \
+ .update({"password": item.get("password")})
+ else:
+ self.msg = "password is mandatory for updating \
+ httpsRead " + str(have_httpsread_ptr)
+ self.status = "failed"
+ return self
+ for i in range(1, 5):
+ if item.get(values[i]):
+ update_credential[update_httpsread_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ update_credential[update_httpsread_ptr].update({
+ values[i]: self.have.get("globalCredential")
+ .get("httpsRead")[have_httpsread_ptr].get(values[i])
+ })
+ update_httpsread_ptr = update_httpsread_ptr + 1
+ have_httpsread_ptr = have_httpsread_ptr + 1
+
+ if CredentialDetails.get("https_write"):
+ httpsWrite = CredentialDetails.get("https_write")
+ have_httpswrite_ptr = 0
+ create_httpswrite_ptr = 0
+ update_httpswrite_ptr = 0
+ values = ["password", "description", "username", "id", "port"]
+ have_httpsWrite = self.have.get("globalCredential").get("httpsWrite")
+ for item in httpsWrite:
+ if not have_httpsWrite or have_httpsWrite[have_httpswrite_ptr] is None:
+ if want.get("want_create").get("httpsWrite") is None:
+ want.get("want_create").update({"httpsWrite": []})
+ create_credential = want.get("want_create").get("httpsWrite")
+ create_credential.append({})
+ for i in range(0, 3):
+ if item.get(values[i]):
+ create_credential[create_httpswrite_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ self.msg = values[i] + " is mandatory for creating \
+ httpsWrite " + str(have_httpswrite_ptr)
+ self.status = "failed"
+ return self
+ if item.get("port"):
+ create_credential[create_httpswrite_ptr] \
+ .update({"port": item.get("port")})
+ else:
+ create_credential[create_httpswrite_ptr] \
+ .update({"port": "443"})
+ create_httpswrite_ptr = create_httpswrite_ptr + 1
+ else:
+ if want.get("want_update").get("httpsWrite") is None:
+ want.get("want_update").update({"httpsWrite": []})
+ update_credential = want.get("want_update").get("httpsWrite")
+ update_credential.append({})
+ if item.get("password"):
+ update_credential[update_httpswrite_ptr] \
+ .update({"password": item.get("password")})
+ else:
+ self.msg = "password is mandatory for updating \
+ httpsRead " + str(have_httpswrite_ptr)
+ self.status = "failed"
+ return self
+ for i in range(1, 5):
+ if item.get(values[i]):
+ update_credential[update_httpswrite_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ update_credential[update_httpswrite_ptr].update({
+ values[i]: self.have.get("globalCredential")
+ .get("httpsWrite")[have_httpswrite_ptr].get(values[i])
+ })
+ update_httpswrite_ptr = update_httpswrite_ptr + 1
+ have_httpswrite_ptr = have_httpswrite_ptr + 1
+
+ if CredentialDetails.get("snmp_v3"):
+ snmpV3 = CredentialDetails.get("snmp_v3")
+ have_snmpv3_ptr = 0
+ create_snmpv3_ptr = 0
+ update_snmpv3_ptr = 0
+ values = ["description", "username", "id"]
+ have_snmpV3 = self.have.get("globalCredential").get("snmpV3")
+ for item in snmpV3:
+ if not have_snmpV3 or have_snmpV3[have_snmpv3_ptr] is None:
+ if want.get("want_create").get("snmpV3") is None:
+ want.get("want_create").update({"snmpV3": []})
+ create_credential = want.get("want_create").get("snmpV3")
+ create_credential.append({})
+ for i in range(0, 2):
+ if item.get(values[i]):
+ create_credential[create_snmpv3_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ self.msg = values[i] + " is mandatory for creating \
+ snmpV3 " + str(have_snmpv3_ptr)
+ self.status = "failed"
+ return self
+ if item.get("snmp_mode"):
+ create_credential[create_snmpv3_ptr] \
+ .update({"snmpMode": item.get("snmp_mode")})
+ else:
+ create_credential[create_snmpv3_ptr] \
+ .update({"snmpMode": "AUTHPRIV"})
+ if create_credential[create_snmpv3_ptr].get("snmpMode") == "AUTHNOPRIV" or \
+ create_credential[create_snmpv3_ptr].get("snmpMode") == "AUTHPRIV":
+ auths = ["auth_password", "auth_type"]
+ keys = {
+ "auth_password": "authPassword",
+ "auth_type": "authType"
+ }
+ for auth in auths:
+ if item.get(auth):
+ create_credential[create_snmpv3_ptr] \
+ .update({keys[auth]: item.get(auth)})
+ else:
+ self.msg = auth + " is mandatory for creating \
+ snmpV3 " + str(have_snmpv3_ptr)
+ self.status = "failed"
+ return self
+ if len(item.get("auth_password")) < 8:
+ self.msg = "auth_password length should be greater than 8"
+ self.status = "failed"
+ return self
+ self.log("snmp_mode: {0}".format(create_credential[create_snmpv3_ptr]
+ .get("snmpMode")), "DEBUG")
+ if create_credential[create_snmpv3_ptr].get("snmpMode") == "AUTHPRIV":
+ privs = ["privacy_password", "privacy_type"]
+ key = {
+ "privacy_password": "privacyPassword",
+ "privacy_type": "privacyType"
+ }
+ for priv in privs:
+ if item.get(priv):
+ create_credential[create_snmpv3_ptr] \
+ .update({key[priv]: item.get(priv)})
+ else:
+ self.msg = priv + " is mandatory for creating \
+ snmpV3 " + str(have_snmpv3_ptr)
+ self.status = "failed"
+ return self
+ if len(item.get("privacy_password")) < 8:
+ self.msg = "privacy_password should be greater than 8"
+ self.status = "failed"
+ return self
+ elif create_credential[create_snmpv3_ptr].get("snmpMode") != "NOAUTHNOPRIV":
+ self.msg = "snmp_mode in snmpV3 is not \
+ ['AUTHPRIV', 'AUTHNOPRIV', 'NOAUTHNOPRIV']"
+ self.status = "failed"
+ return self
+ create_snmpv3_ptr = create_snmpv3_ptr + 1
+ else:
+ if want.get("want_update").get("snmpV3") is None:
+ want.get("want_update").update({"snmpV3": []})
+ update_credential = want.get("want_update").get("snmpV3")
+ update_credential.append({})
+ for value in values:
+ if item.get(value):
+ update_credential[update_snmpv3_ptr] \
+ .update({value: item.get(value)})
+ else:
+ update_credential[update_snmpv3_ptr].update({
+ value: self.have.get("globalCredential")
+ .get("snmpV3")[have_snmpv3_ptr].get(value)
+ })
+ if item.get("snmp_mode"):
+ update_credential[update_snmpv3_ptr] \
+ .update({"snmpMode": item.get("snmp_mode")})
+ if update_credential[update_snmpv3_ptr].get("snmpMode") == "AUTHNOPRIV" or \
+ update_credential[update_snmpv3_ptr].get("snmpMode") == "AUTHPRIV":
+ if item.get("auth_type"):
+ update_credential[update_snmpv3_ptr] \
+ .update({"authType": item.get("auth_type")})
+ elif self.have.get("globalCredential") \
+ .get("snmpMode")[have_snmpv3_ptr].get("authType"):
+ update_credential[update_snmpv3_ptr].update({
+ "authType": self.have.get("globalCredential")
+ .get("snmpMode")[have_snmpv3_ptr].get("authType")
+ })
+ else:
+ self.msg = "auth_type is required for updating snmpV3 " + \
+ str(have_snmpv3_ptr)
+ self.status = "failed"
+ return self
+ if item.get("auth_password"):
+ update_credential[update_snmpv3_ptr] \
+ .update({"authPassword": item.get("auth_password")})
+ else:
+ self.msg = "auth_password is required for updating snmpV3 " + \
+ str(have_snmpv3_ptr)
+ self.status = "failed"
+ return self
+ if len(item.get("auth_password")) < 8:
+ self.msg = "auth_password length should be greater than 8"
+ self.status = "failed"
+ return self
+ elif update_credential[update_snmpv3_ptr].get("snmpMode") == "AUTHPRIV":
+ if item.get("privacy_type"):
+ update_credential[update_snmpv3_ptr] \
+ .update({"privacyType": item.get("privacy_type")})
+ elif self.have.get("globalCredential") \
+ .get("snmpMode")[have_snmpv3_ptr].get("privacyType"):
+ update_credential[update_snmpv3_ptr].update({
+ "privacyType": self.have.get("globalCredential")
+ .get("snmpMode")[have_snmpv3_ptr].get("privacyType")
+ })
+ else:
+ self.msg = "privacy_type is required for updating snmpV3 " + \
+ str(have_snmpv3_ptr)
+ self.status = "failed"
+ return self
+ if item.get("privacy_password"):
+ update_credential[update_snmpv3_ptr] \
+ .update({"privacyPassword": item.get("privacy_password")})
+ else:
+ self.msg = "privacy_password is required for updating snmpV3 " + \
+ str(have_snmpv3_ptr)
+ self.status = "failed"
+ return self
+ if len(item.get("privacy_password")) < 8:
+ self.msg = "privacy_password length should be greater than 8"
+ self.status = "failed"
+ return self
+ update_snmpv3_ptr = update_snmpv3_ptr + 1
+ have_snmpv3_ptr = have_snmpv3_ptr + 1
+ self.want.update(want)
+ self.msg = "Collected the Global Credentials from the Cisco DNA Center"
+ self.status = "success"
+ return self
+
+ def get_want_assign_credentials(self, AssignCredentials):
+ """
+ Get the Credentials to be assigned to a site from the playbook.
+ Check this API using the check_return_status.
+
+ Parameters:
+ AssignCredentials (dict) - Playbook details containing
+ credentials that need to be assigned to a site.
+
+ Returns:
+ self - The current object with updated information of credentials
+ that need to be assigned to a site from the playbook.
+ """
+ want = {
+ "assign_credentials": {}
+ }
+ site_name = AssignCredentials.get("site_name")
+ if not site_name:
+ self.msg = "site_name is required for AssignCredentials"
+ self.status = "failed"
+ return self
+ site_id = []
+ for site_name in site_name:
+ siteId = self.get_site_id(site_name)
+ if not site_name:
+ self.msg = "site_name is invalid in AssignCredentials"
+ self.status = "failed"
+ return self
+ site_id.append(siteId)
+ want.update({"site_id": site_id})
+ global_credentials = self.get_global_credentials_params()
+ cli_credential = AssignCredentials.get("cli_credential")
+ if cli_credential:
+ cliId = cli_credential.get("id")
+ cliDescription = cli_credential.get("description")
+ cliUsername = cli_credential.get("username")
+
+ if cliId or cliDescription and cliUsername:
+ # All CLI details from the Cisco DNA Center
+ cli_details = global_credentials.get("cliCredential")
+ if not cli_details:
+ self.msg = "Global CLI credential is not available"
+ self.status = "failed"
+ return self
+ cliDetail = None
+ if cliId:
+ cliDetail = get_dict_result(cli_details, "id", cliId)
+ if not cliDetail:
+ self.msg = "The ID for the CLI credential is not valid."
+ self.status = "failed"
+ return self
+ elif cliDescription and cliUsername:
+ for item in cli_details:
+ if item.get("description") == cliDescription and \
+ item.get("username") == cliUsername:
+ cliDetail = item
+ if not cliDetail:
+ self.msg = "The username and description of the CLI credential are invalid"
+ self.status = "failed"
+ return self
+ want.get("assign_credentials").update({"cliId": cliDetail.get("id")})
+
+ snmp_v2c_read = AssignCredentials.get("snmp_v2c_read")
+ if snmp_v2c_read:
+ snmpV2cReadId = snmp_v2c_read.get("id")
+ snmpV2cReadDescription = snmp_v2c_read.get("description")
+ if snmpV2cReadId or snmpV2cReadDescription:
+
+ # All snmpV2cRead details from the Cisco DNA Center
+ snmpV2cRead_details = global_credentials.get("snmpV2cRead")
+ if not snmpV2cRead_details:
+ self.msg = "Global snmpV2cRead credential is not available"
+ self.status = "failed"
+ return self
+ snmpV2cReadDetail = None
+ if snmpV2cReadId:
+ snmpV2cReadDetail = get_dict_result(snmpV2cRead_details, "id", snmpV2cReadId)
+ if not snmpV2cReadDetail:
+ self.msg = "The ID of the snmpV2cRead credential is not valid."
+ self.status = "failed"
+ return self
+ elif snmpV2cReadDescription:
+ for item in snmpV2cRead_details:
+ if item.get("description") == snmpV2cReadDescription:
+ snmpV2cReadDetail = item
+ if not snmpV2cReadDetail:
+ self.msg = "The username and description for the snmpV2cRead credential are invalid."
+ self.status = "failed"
+ return self
+ want.get("assign_credentials").update({"snmpV2ReadId": snmpV2cReadDetail.get("id")})
+
+ snmp_v2c_write = AssignCredentials.get("snmp_v2c_write")
+ if snmp_v2c_write:
+ snmpV2cWriteId = snmp_v2c_write.get("id")
+ snmpV2cWriteDescription = snmp_v2c_write.get("description")
+ if snmpV2cWriteId or snmpV2cWriteDescription:
+
+ # All snmpV2cWrite details from the Cisco DNA Center
+ snmpV2cWrite_details = global_credentials.get("snmpV2cWrite")
+ if not snmpV2cWrite_details:
+ self.msg = "Global snmpV2cWrite Credential is not available"
+ self.status = "failed"
+ return self
+ snmpV2cWriteDetail = None
+ if snmpV2cWriteId:
+ snmpV2cWriteDetail = get_dict_result(snmpV2cWrite_details, "id", snmpV2cWriteId)
+ if not snmpV2cWriteDetail:
+ self.msg = "The ID of the snmpV2cWrite credential is invalid."
+ self.status = "failed"
+ return self
+ elif snmpV2cWriteDescription:
+ for item in snmpV2cWrite_details:
+ if item.get("description") == snmpV2cWriteDescription:
+ snmpV2cWriteDetail = item
+ if not snmpV2cWriteDetail:
+ self.msg = "The username and description of the snmpV2cWrite credential are invalid."
+ self.status = "failed"
+ return self
+ want.get("assign_credentials").update({"snmpV2WriteId": snmpV2cWriteDetail.get("id")})
+
+ https_read = AssignCredentials.get("https_read")
+ if https_read:
+ httpReadId = https_read.get("id")
+ httpReadDescription = https_read.get("description")
+ httpReadUsername = https_read.get("username")
+ if httpReadId or httpReadDescription and httpReadUsername:
+
+ # All httpRead details from the Cisco DNA Center
+ httpRead_details = global_credentials.get("httpsRead")
+ if not httpRead_details:
+ self.msg = "Global httpRead Credential is not available."
+ self.status = "failed"
+ return self
+ httpReadDetail = None
+ if httpReadId:
+ httpReadDetail = get_dict_result(httpRead_details, "id", httpReadId)
+ if not httpReadDetail:
+ self.msg = "The ID of the httpRead credential is not valid."
+ self.status = "failed"
+ return self
+ elif httpReadDescription and httpReadUsername:
+ for item in httpRead_details:
+ if item.get("description") == httpReadDescription and \
+ item.get("username") == httpReadUsername:
+ httpReadDetail = item
+ if not httpReadDetail:
+ self.msg = "The description and username for the httpRead credential are invalid."
+ self.status = "failed"
+ return self
+ want.get("assign_credentials").update({"httpRead": httpReadDetail.get("id")})
+
+ https_write = AssignCredentials.get("https_write")
+ if https_write:
+ httpWriteId = https_write.get("id")
+ httpWriteDescription = https_write.get("description")
+ httpWriteUsername = https_write.get("username")
+ if httpWriteId or httpWriteDescription and httpWriteUsername:
+
+ # All httpWrite details from the Cisco DNA Center
+ httpWrite_details = global_credentials.get("httpsWrite")
+ if not httpWrite_details:
+ self.msg = "Global httpWrite credential is not available."
+ self.status = "failed"
+ return self
+ httpWriteDetail = None
+ if httpWriteId:
+ httpWriteDetail = get_dict_result(httpWrite_details, "id", httpWriteId)
+ if not httpWriteDetail:
+ self.msg = "The ID of the httpWrite credential is not valid."
+ self.status = "failed"
+ return self
+ elif httpWriteDescription and httpWriteUsername:
+ for item in httpWrite_details:
+ if item.get("description") == httpWriteDescription and \
+ item.get("username") == httpWriteUsername:
+ httpWriteDetail = item
+ if not httpWriteDetail:
+ self.msg = "The description and username for the httpWrite credential are invalid."
+ self.status = "failed"
+ return self
+ want.get("assign_credentials").update({"httpWrite": httpWriteDetail.get("id")})
+
+ snmp_v3 = AssignCredentials.get("snmp_v3")
+ if snmp_v3:
+ snmpV3Id = snmp_v3.get("id")
+ snmpV3Description = snmp_v3.get("description")
+ if snmpV3Id or snmpV3Description:
+
+ # All snmpV3 details from the Cisco DNA Center
+ snmpV3_details = global_credentials.get("snmpV3")
+ if not snmpV3_details:
+ self.msg = "Global snmpV3 Credential is not available."
+ self.status = "failed"
+ return self
+ snmpV3Detail = None
+ if snmpV3Id:
+ snmpV3Detail = get_dict_result(snmpV3_details, "id", snmpV3Id)
+ if not snmpV3Detail:
+ self.msg = "The ID of the snmpV3 credential is not valid."
+ self.status = "failed"
+ return self
+ elif snmpV3Description:
+ for item in snmpV3_details:
+ if item.get("description") == snmpV3Description:
+ snmpV3Detail = item
+ if not snmpV3Detail:
+ self.msg = "The username and description for the snmpV2cWrite credential are invalid."
+ self.status = "failed"
+ return self
+ want.get("assign_credentials").update({"snmpV3Id": snmpV3Detail.get("id")})
+ self.log("Desired State (want): {0}".format(want), "INFO")
+ self.want.update(want)
+ self.msg = "Collected the Credentials needed to be assigned from the Cisco DNA Center"
+ self.status = "success"
+ return self
+
+ def get_want(self, config):
+ """
+ Get the current Global Device Credentials and Device
+ Credentials assigned to a site form the playbook.
+
+ Parameters:
+ config (dict) - Playbook details containing Global Device
+ Credentials configurations and Device Credentials should
+ be assigned to a site.
+
+ Returns:
+ self - The current object with updated information of Global
+ Device Credentials and Device Credentials assigned to a site.
+ """
+
+ if config.get("global_credential_details"):
+ CredentialDetails = config.get("global_credential_details")
+ self.get_want_device_credentials(CredentialDetails).check_return_status()
+
+ if config.get("assign_credentials_to_site"):
+ AssignCredentials = config.get("assign_credentials_to_site")
+ self.get_want_assign_credentials(AssignCredentials).check_return_status()
+
+ self.log("Desired State (want): {0}".format(self.want), "INFO")
+ self.msg = "Successfully retrieved details from the playbook"
+ self.status = "success"
+ return self
+
+ def create_device_credentials(self):
+ """
+ Create Global Device Credential to the Cisco DNA
+ Center based on the provided playbook details.
+ Check the return value of the API with check_return_status().
+
+ Parameters:
+ self
+
+ Returns:
+ self
+ """
+
+ result_global_credential = self.result.get("response")[0].get("globalCredential")
+ want_create = self.want.get("want_create")
+ if not want_create:
+ result_global_credential.update({
+ "No Creation": {
+ "response": "No Response",
+ "msg": "No Creation is available"
+ }
+ })
+ return self
+
+ credential_params = want_create
+ self.log("Creating global credential API input parameters: {0}"
+ .format(credential_params), "DEBUG")
+ response = self.dnac._exec(
+ family="discovery",
+ function='create_global_credentials_v2',
+ params=credential_params,
+ )
+ self.log("Received API response from 'create_global_credentials_v2': {0}"
+ .format(response), "DEBUG")
+ validation_string = "global credential addition performed"
+ self.check_task_response_status(response, validation_string).check_return_status()
+ self.log("Global credential created successfully", "INFO")
+ result_global_credential.update({
+ "Creation": {
+ "response": credential_params,
+ "msg": "Global Credential Created Successfully"
+ }
+ })
+ self.msg = "Global Device Credential Created Successfully"
+ self.status = "success"
+ return self
+
+ def update_device_credentials(self):
+ """
+ Update Device Credential to the Cisco DNA Center based on the provided playbook details.
+ Check the return value of the API with check_return_status().
+
+ Parameters:
+ self
+
+ Returns:
+ self
+ """
+
+ result_global_credential = self.result.get("response")[0].get("globalCredential")
+
+ # Get the result global credential and want_update from the current object
+ want_update = self.want.get("want_update")
+ # If no credentials to update, update the result and return
+ if not want_update:
+ result_global_credential.update({
+ "No Updation": {
+ "response": "No Response",
+ "msg": "No Updation is available"
+ }
+ })
+ self.msg = "No Updation is available"
+ self.status = "success"
+ return self
+ i = 0
+ flag = True
+ values = ["cliCredential", "snmpV2cRead", "snmpV2cWrite",
+ "httpsRead", "httpsWrite", "snmpV3"]
+ final_response = []
+ self.log("Desired State for global device credentials updation: {0}"
+ .format(want_update), "DEBUG")
+ while flag:
+ flag = False
+ credential_params = {}
+ for value in values:
+ if want_update.get(value) and i < len(want_update.get(value)):
+ flag = True
+ credential_params.update({value: want_update.get(value)[i]})
+ i = i + 1
+ if credential_params:
+ final_response.append(credential_params)
+ response = self.dnac._exec(
+ family="discovery",
+ function='update_global_credentials_v2',
+ params=credential_params,
+ )
+ self.log("Received API response for 'update_global_credentials_v2': {0}"
+ .format(response), "DEBUG")
+ validation_string = "global credential update performed"
+ self.check_task_response_status(response, validation_string).check_return_status()
+ self.log("Updating device credential API input parameters: {0}"
+ .format(final_response), "DEBUG")
+ self.log("Global device credential updated successfully", "INFO")
+ result_global_credential.update({
+ "Updation": {
+ "response": final_response,
+ "msg": "Global Device Credential Updated Successfully"
+ }
+ })
+ self.msg = "Global Device Credential Updated Successfully"
+ self.status = "success"
+ return self
+
+ def assign_credentials_to_site(self):
+ """
+ Assign Global Device Credential to the Cisco DNA
+ Center based on the provided playbook details.
+ Check the return value of the API with check_return_status().
+
+ Parameters:
+ self
+
+ Returns:
+ self
+ """
+
+ result_assign_credential = self.result.get("response")[0].get("assignCredential")
+ credential_params = self.want.get("assign_credentials")
+ final_response = []
+ self.log("Assigning device credential to site API input parameters: {0}"
+ .format(credential_params), "DEBUG")
+ if not credential_params:
+ result_assign_credential.update({
+ "No Assign Credentials": {
+ "response": "No Response",
+ "msg": "No Assignment is available"
+ }
+ })
+ self.msg = "No Assignment is available"
+ self.status = "success"
+ return self
+
+ site_ids = self.want.get("site_id")
+ for site_id in site_ids:
+ credential_params.update({"site_id": site_id})
+ final_response.append(copy.deepcopy(credential_params))
+ response = self.dnac._exec(
+ family="network_settings",
+ function='assign_device_credential_to_site_v2',
+ params=credential_params,
+ )
+ self.log("Received API response for 'assign_device_credential_to_site_v2': {0}"
+ .format(response), "DEBUG")
+ validation_string = "desired common settings operation successful"
+ self.check_task_response_status(response, validation_string).check_return_status()
+ self.log("Device credential assigned to site {0} is successfully."
+ .format(site_ids), "INFO")
+ self.log("Desired State for assign credentials to a site: {0}"
+ .format(final_response), "DEBUG")
+ result_assign_credential.update({
+ "Assign Credentials": {
+ "response": final_response,
+ "msg": "Device Credential Assigned to a site is Successfully"
+ }
+ })
+ self.msg = "Global Credential is assigned Successfully"
+ self.status = "success"
+ return self
+
+ def get_diff_merged(self, config):
+ """
+ Update or Create Global Device Credential and assign device
+ credential to a site in Cisco DNA Center based on the playbook provided.
+
+ Parameters:
+ config (list of dict) - Playbook details containing Global
+ Device Credential and assign credentials to a site information.
+
+ Returns:
+ self
+ """
+
+ if config.get("global_credential_details") is not None:
+ self.create_device_credentials().check_return_status()
+
+ if config.get("global_credential_details") is not None:
+ self.update_device_credentials().check_return_status()
+
+ if config.get("assign_credentials_to_site") is not None:
+ self.assign_credentials_to_site().check_return_status()
+
+ return self
+
+ def delete_device_credential(self, config):
+ """
+ Delete Global Device Credential in Cisco DNA Center based on the playbook details.
+ Check the return value of the API with check_return_status().
+
+ Parameters:
+ config (dict) - Playbook details containing Global Device Credential information.
+ self - The current object details.
+
+ Returns:
+ self
+ """
+
+ result_global_credential = self.result.get("response")[0].get("globalCredential")
+ have_values = self.have.get("globalCredential")
+ final_response = {}
+ self.log("Global device credentials to be deleted: {0}".format(have_values), "DEBUG")
+ credential_mapping = {
+ "cliCredential": "cli_credential",
+ "snmpV2cRead": "snmp_v2c_read",
+ "snmpV2cWrite": "snmp_v2c_write",
+ "snmpV3": "snmp_v3",
+ "httpsRead": "https_read",
+ "httpsWrite": "https_write"
+ }
+ for item in have_values:
+ config_itr = 0
+ final_response.update({item: []})
+ for value in have_values.get(item):
+ if value is None:
+ self.log("Credential Name: {0}".format(item), "DEBUG")
+ self.log("Credential Item: {0}".format(config.get("global_credential_details")
+ .get(credential_mapping.get(item))), "DEBUG")
+ final_response.get(item).append(
+ str(config.get("global_credential_details")
+ .get(credential_mapping.get(item))[config_itr]) + " is not found."
+ )
+ continue
+ _id = have_values.get(item)[config_itr].get("id")
+ response = self.dnac._exec(
+ family="discovery",
+ function="delete_global_credential_v2",
+ params={"id": _id},
+ )
+ self.log("Received API response for 'delete_global_credential_v2': {0}"
+ .format(response), "DEBUG")
+ validation_string = "global credential deleted successfully"
+ self.check_task_response_status(response, validation_string).check_return_status()
+ final_response.get(item).append(_id)
+ config_itr = config_itr + 1
+
+ self.log("Deleting device credential API input parameters: {0}"
+ .format(final_response), "DEBUG")
+ self.log("Successfully deleted global device credential.", "INFO")
+ result_global_credential.update({
+ "Deletion": {
+ "response": final_response,
+ "msg": "Global Device Credentials Deleted Successfully"
+ }
+ })
+ self.msg = "Global Device Credentials Updated Successfully"
+ self.status = "success"
+ return self
+
+ def get_diff_deleted(self, config):
+ """
+ Delete Global Device Credential in Cisco DNA Center based on the playbook details.
+
+ Parameters:
+ config (dict) - Playbook details containing Global Device Credential information.
+ self - The current object details.
+
+ Returns:
+ self
+ """
+
+ if config.get("global_credential_details") is not None:
+ self.delete_device_credential(config).check_return_status()
+
+ return self
+
+ def verify_diff_merged(self, config):
+ """
+ Validating the DNAC configuration with the playbook details
+ when state is merged (Create/Update).
+
+ Parameters:
+ config (dict) - Playbook details containing Global Pool,
+ Reserved Pool, and Network Management configuration.
+
+ Returns:
+ self
+ """
+
+ self.log(str("Entered the verify function."), "DEBUG")
+ self.get_have(config)
+ self.get_want(config)
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.log("Desired State (want): {0}".format(self.want), "INFO")
+
+ if config.get("global_credential_details") is not None:
+ if self.want.get("want_create"):
+ self.msg = "Global Device Credentials config is not applied to the DNAC"
+ self.status = "failed"
+ return self
+
+ if self.want.get("want_update"):
+ credential_types = ["cliCredential", "snmpV2cRead", "snmpV2cWrite",
+ "httpsRead", "httpsWrite", "snmpV3"]
+ value_mapping = {
+ "cliCredential": ["username", "description", "id"],
+ "snmpV2cRead": ["description", "id"],
+ "snmpV2cWrite": ["description", "id"],
+ "httpsRead": ["description", "username", "port", "id"],
+ "httpsWrite": ["description", "username", "port", "id"],
+ "snmpV3": ["username", "description", "snmpMode", "id"]
+ }
+ for credential_type in credential_types:
+ if self.want.get(credential_type):
+ want_credential = self.want.get(credential_type)
+ if self.have.get(credential_type):
+ have_credential = self.have.get(credential_type)
+ values = value_mapping.get(credential_type)
+ for value in values:
+ equality = have_credential.get(value) is want_credential.get(value)
+ if not have_credential or not equality:
+ self.msg = "{0} config is not applied ot the DNAC".format(credential_type)
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated global device credential", "INFO")
+ self.result.get("response")[0].get("globalCredential").update({"Validation": "Success"})
+
+ if config.get("assign_credentials_to_site") is not None:
+ self.log("Successfully validated the assign device credential to site", "INFO")
+ self.result.get("response")[0].get("assignCredential").update({"Validation": "Success"})
+
+ self.msg = "Successfully validated the Global Device Credential and \
+ Assign Device Credential to Site."
+ self.status = "success"
+ return self
+
+ def verify_diff_deleted(self, config):
+ """
+ Validating the DNAC configuration with the playbook details
+ when state is deleted (delete).
+
+ Parameters:
+ config (dict) - Playbook details containing Global Pool,
+ Reserved Pool, and Network Management configuration.
+
+ Returns:
+ self
+ """
+
+ self.get_have(config)
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.log("Desired State (want): {0}".format(self.want), "INFO")
+
+ if config.get("global_credential_details") is not None:
+ have_global_credential = self.have.get("globalCredential")
+ credential_types = ["cliCredential", "snmpV2cRead", "snmpV2cWrite",
+ "httpsRead", "httpsWrite", "snmpV3"]
+ for credential_type in credential_types:
+ for item in have_global_credential.get(credential_type):
+ if item is not None:
+ self.msg = "Delete Global Device Credentials config \
+ is not applied to the config"
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated absence of global device credential.", "INFO")
+ self.result.get("response")[0].get("globalCredential").update({"Validation": "Success"})
+
+ self.msg = "Successfully validated the absence of Global Device Credential."
+ self.status = "success"
+ return self
+
+ def reset_values(self):
+ """
+ Reset all neccessary attributes to default values
+
+ Parameters:
+ self
+
+ Returns:
+ self
+ """
+
+ self.have.clear()
+ self.want.clear()
+ return self
+
+
def main():
    """Main entry point: build the argument spec, run the requested state
    handler for each config item, and exit with the accumulated result."""

    # Define the specification for module arguments.
    element_spec = {
        "dnac_host": {"type": 'str', "required": True},
        "dnac_port": {"type": 'str', "default": '443'},
        "dnac_username": {"type": 'str', "default": 'admin', "aliases": ['user']},
        "dnac_password": {"type": 'str', "no_log": True},
        # Fixed: the default was the string 'True' for a 'bool' option; use a
        # real boolean (AnsibleModule coerced it, so behavior is unchanged).
        "dnac_verify": {"type": 'bool', "default": True},
        "dnac_version": {"type": 'str', "default": '2.2.3.3'},
        "dnac_debug": {"type": 'bool', "default": False},
        "dnac_log": {"type": 'bool', "default": False},
        "dnac_log_level": {"type": 'str', "default": 'WARNING'},
        "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
        "dnac_log_append": {"type": 'bool', "default": True},
        "config_verify": {"type": 'bool', "default": False},
        # Consistency: these two keys previously used single quotes.
        "dnac_api_task_timeout": {"type": 'int', "default": 1200},
        "dnac_task_poll_interval": {"type": 'int', "default": 2},
        "config": {"type": 'list', "required": True, "elements": 'dict'},
        "state": {"default": 'merged', "choices": ['merged', 'deleted']},
        "validate_response_schema": {"type": 'bool', "default": True},
    }

    # Create an AnsibleModule object with argument specifications.
    module = AnsibleModule(argument_spec=element_spec, supports_check_mode=False)
    dnac_credential = DnacCredential(module)
    state = dnac_credential.params.get("state")
    config_verify = dnac_credential.params.get("config_verify")
    if state not in dnac_credential.supported_states:
        dnac_credential.status = "invalid"
        dnac_credential.msg = "State {0} is invalid".format(state)
        dnac_credential.check_return_status()

    dnac_credential.validate_input().check_return_status()

    # Apply (and optionally verify) each config item independently.
    for config in dnac_credential.config:
        dnac_credential.reset_values()
        dnac_credential.get_have(config).check_return_status()
        if state != "deleted":
            # 'want' is only needed for create/update flows.
            dnac_credential.get_want(config).check_return_status()
        dnac_credential.get_diff_state_apply[state](config).check_return_status()
        if config_verify:
            dnac_credential.verify_diff_state_apply[state](config).check_return_status()

    module.exit_json(**dnac_credential.result)
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_credential_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/device_credential_workflow_manager.py
new file mode 100644
index 000000000..3db97ce05
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_credential_workflow_manager.py
@@ -0,0 +1,2617 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+"""Ansible module to perform operations on device credentials in Cisco Catalyst Center."""
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ['Muthu Rakesh, Madhan Sankaranarayanan']
+
+DOCUMENTATION = r"""
+---
+module: device_credential_workflow_manager
+short_description: Resource module for Global Device Credentials and Assigning Credentials to sites.
+description:
+- Manage operations on Global Device Credentials and Assigning Credentials to sites.
+- API to create global device credentials.
+- API to update global device credentials.
+- API to delete global device credentials.
+- API to assign the device credential to the site.
+version_added: '6.7.0'
+extends_documentation_fragment:
+ - cisco.dnac.workflow_manager_params
+author: Muthu Rakesh (@MUTHU-RAKESH-27)
+ Madhan Sankaranarayanan (@madhansansel)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Cisco Catalyst Center after module completion.
+ type: str
+ choices: [ merged, deleted ]
+ default: merged
+ config:
+ description:
+ - List of details of global device credentials and site names.
+ type: list
+ elements: dict
+ required: true
+ suboptions:
+ global_credential_details:
+ description: Manages global device credentials
+ type: dict
+ suboptions:
+ cli_credential:
+ description: Global Credential V2's cliCredential.
+ type: list
+ elements: dict
+ suboptions:
+ description:
+ description: Description. Required for creating the credential.
+ type: str
+ enable_password:
+ description:
+ - cli_credential credential Enable Password.
+ - Password cannot contain spaces or angle brackets (< >)
+ type: str
+ id:
+ description: Credential Id. Use this for updating the device credential.
+ type: str
+ password:
+ description:
+ - cli_credential credential Password.
+ - Required for creating/updating the credential.
+ - Password cannot contain spaces or angle brackets (< >).
+ type: str
+ username:
+ description:
+ - cli_credential credential Username.
+ - Username cannot contain spaces or angle brackets (< >).
+ type: str
+ old_description:
+ description: Old Description. Use this for updating the description/Username.
+ type: str
+ old_username:
+ description: Old Username. Use this for updating the description/Username.
+ type: str
+ https_read:
+ description: Global Credential V2's httpsRead.
+ type: list
+ elements: dict
+ suboptions:
+ id:
+ description: Credential Id. Use this for updating the device credential.
+ type: str
+ name:
+ description: Name. Required for creating the credential.
+ type: str
+ password:
+ description:
+ - https_read credential Password.
+ - Required for creating/updating the credential.
+ - Password cannot contain spaces or angle brackets (< >).
+ type: str
+ port:
+ description: Port. Default port is 443.
+ type: int
+ username:
+ description:
+ - https_read credential Username.
+ - Username cannot contain spaces or angle brackets (< >).
+ type: str
+ old_description:
+ description: Old Description. Use this for updating the description/Username.
+ type: str
+ old_username:
+ description: Old Username. Use this for updating the description/Username.
+ type: str
+ https_write:
+ description: Global Credential V2's httpsWrite.
+ type: list
+ elements: dict
+ suboptions:
+ id:
+ description: Credential Id. Use this for updating the device credential.
+ type: str
+ name:
+ description: Name. Required for creating the credential.
+ type: str
+ password:
+ description:
+ - https_write credential Password.
+ - Required for creating/updating the credential.
+ - Password cannot contain spaces or angle brackets (< >).
+ type: str
+ port:
+ description: Port. Default port is 443.
+ type: int
+ username:
+ description:
+ - https_write credential Username.
+ - Username cannot contain spaces or angle brackets (< >).
+ type: str
+ old_description:
+ description: Old Description. Use this for updating the description/Username.
+ type: str
+ old_username:
+ description: Old Username. Use this for updating the description/Username.
+ type: str
+ snmp_v2c_read:
+ description: Global Credential V2's snmpV2cRead.
+ type: list
+ elements: dict
+ suboptions:
+ description:
+ description: Description. Required for creating the credential.
+ type: str
+ id:
+ description: Credential Id. Use this for updating the device credential.
+ type: str
+ read_community:
+ description:
+ - snmp_v2c_read Read Community.
+ - Password cannot contain spaces or angle brackets (< >).
+ type: str
+ old_description:
+ description: Old Description. Use this for updating the description.
+ type: str
+ snmp_v2c_write:
+ description: Global Credential V2's snmpV2cWrite.
+ type: list
+ elements: dict
+ suboptions:
+ description:
+ description: Description. Required for creating the credential.
+ type: str
+ id:
+ description: Credential Id. Use this for updating the device credential.
+ type: str
+ write_community:
+ description:
+ - snmp_v2c_write Write Community.
+ - Password cannot contain spaces or angle brackets (< >).
+ type: str
+ old_description:
+ description: Old Description. Use this for updating the description.
+ type: str
+ snmp_v3:
+ description: Global Credential V2's snmpV3.
+ type: list
+ elements: dict
+ suboptions:
+ auth_password:
+ description:
+ - snmp_v3 Auth Password.
+ - Password must contain minimum 8 characters.
+ - Password cannot contain spaces or angle brackets (< >).
+ type: str
+ auth_type:
+ description: Auth Type. ["SHA", "MD5"].
+ type: str
+ description:
+ description:
+ - snmp_v3 Description.
+ - Should be unique from other snmp_v3 credentials.
+ type: str
+ id:
+ description: Credential Id. Use this for updating the device credential.
+ type: str
+ privacy_password:
+ description:
+ - snmp_v3 Privacy Password.
+ - Password must contain minimum 8 characters.
+ - Password cannot contain spaces or angle brackets (< >).
+ type: str
+ privacy_type:
+ description: Privacy Type. ["AES128", "AES192", "AES256"].
+ type: str
+ snmp_mode:
+ description: Snmp Mode. ["AUTHPRIV", "AUTHNOPRIV", "NOAUTHNOPRIV"].
+ type: str
+ username:
+ description:
+ - snmp_v3 credential Username.
+ - Username cannot contain spaces or angle brackets (< >).
+ type: str
+ old_description:
+ description: Old Description. Use this for updating the description.
+ type: str
+ assign_credentials_to_site:
+ description: Assign Device Credentials to Site.
+ type: dict
+ suboptions:
+ cli_credential:
+ description: CLI Credential.
+ type: dict
+ suboptions:
+ description:
+ description: CLI Credential Description.
+ type: str
+ username:
+ description: CLI Credential Username.
+ type: str
+ id:
+ description: CLI Credential Id. Use (Description, Username) or Id.
+ type: str
+ https_read:
+ description: HTTP(S) Read Credential
+ type: dict
+ suboptions:
+ description:
+ description: HTTP(S) Read Credential Description.
+ type: str
+ username:
+ description: HTTP(S) Read Credential Username.
+ type: str
+ id:
+ description: HTTP(S) Read Credential Id. Use (Description, Username) or Id.
+ type: str
+ https_write:
+ description: HTTP(S) Write Credential
+ type: dict
+ suboptions:
+ description:
+ description: HTTP(S) Write Credential Description.
+ type: str
+ username:
+ description: HTTP(S) Write Credential Username.
+ type: str
+ id:
+ description: HTTP(S) Write Credential Id. Use (Description, Username) or Id.
+ type: str
+ site_name:
+ description: Site Name to assign credential.
+ type: list
+ elements: str
+ snmp_v2c_read:
+ description: SNMPv2c Read Credential
+ type: dict
+ suboptions:
+ description:
+ description: SNMPv2c Read Credential Description.
+ type: str
+ id:
+ description: SNMPv2c Read Credential Id. Use Description or Id.
+ type: str
+ snmp_v2c_write:
+ description: SNMPv2c Write Credential
+ type: dict
+ suboptions:
+ description:
+ description: SNMPv2c Write Credential Description.
+ type: str
+ id:
+ description: SNMPv2c Write Credential Id. Use Description or Id.
+ type: str
+ snmp_v3:
+ description: snmp_v3 Credential
+ type: dict
+ suboptions:
+ description:
+ description: snmp_v3 Credential Description.
+ type: str
+ id:
+ description: snmp_v3 Credential Id. Use Description or Id.
+ type: str
+requirements:
+- dnacentersdk >= 2.5.5
+- python >= 3.5
+seealso:
+- name: Cisco Catalyst Center documentation for Discovery CreateGlobalCredentialsV2
+ description: Complete reference of the CreateGlobalCredentialsV2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!create-global-credentials-v-2
+- name: Cisco Catalyst Center documentation for Discovery DeleteGlobalCredentialV2
+ description: Complete reference of the DeleteGlobalCredentialV2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-global-credential-v-2
+- name: Cisco Catalyst Center documentation for Discovery UpdateGlobalCredentialsV2
+ description: Complete reference of the UpdateGlobalCredentialsV2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-global-credentials-v-2
+- name: Cisco Catalyst Center documentation for Network Settings AssignDeviceCredentialToSiteV2
+ description: Complete reference of the AssignDeviceCredentialToSiteV2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!assign-device-credential-to-site-v-2
+notes:
+ - SDK Method used are
+ discovery.Discovery.create_global_credentials_v2,
+ discovery.Discovery.delete_global_credential_v2,
+ discovery.Discovery.update_global_credentials_v2,
+ network_settings.NetworkSettings.assign_device_credential_to_site_v2,
+
+ - Paths used are
+ post /dna/intent/api/v2/global-credential,
+ delete /dna/intent/api/v2/global-credential/{id},
+ put /dna/intent/api/v2/global-credential,
+ post /dna/intent/api/v2/credential-to-site/{siteId},
+"""
+
+EXAMPLES = r"""
+---
+ - name: Create Credentials and assign it to a site.
+ cisco.dnac.device_credential_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - global_credential_details:
+ cli_credential:
+ - description: string
+ username: string
+ password: string
+ enable_password: string
+ snmp_v2c_read:
+ - description: string
+ read_community: string
+ snmp_v2c_write:
+ - description: string
+ write_community: string
+ snmp_v3:
+ - auth_password: string
+ auth_type: SHA
+ snmp_mode: AUTHPRIV
+ privacy_password: string
+ privacy_type: AES128
+ username: string
+ description: string
+ https_read:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ https_write:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ assign_credentials_to_site:
+ cli_credential:
+ id: string
+ snmp_v2c_read:
+ id: string
+ snmp_v2c_write:
+ id: string
+ snmp_v3:
+ id: string
+ https_read:
+ id: string
+ https_write:
+ id: string
+ site_name:
+ - string
+
+ - name: Create Multiple Credentials.
+ cisco.dnac.device_credential_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - global_credential_details:
+ cli_credential:
+ - description: string
+ username: string
+ password: string
+ enable_password: string
+ - description: string
+ username: string
+ password: string
+ enable_password: string
+ snmp_v2c_read:
+ - description: string
+ read_community: string
+ - description: string
+ read_community: string
+ snmp_v2c_write:
+ - description: string
+ write_community: string
+ - description: string
+ write_community: string
+ snmp_v3:
+ - auth_password: string
+ auth_type: SHA
+ snmp_mode: AUTHPRIV
+ privacy_password: string
+ privacy_type: AES128
+ username: string
+ description: string
+ - auth_password: string
+ auth_type: SHA
+ snmp_mode: AUTHPRIV
+ privacy_password: string
+ privacy_type: AES128
+ username: string
+ description: string
+ https_read:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ - description: string
+ username: string
+ password: string
+ port: 443
+ https_write:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ - description: string
+ username: string
+ password: string
+ port: 443
+
+ - name: Update global device credentials using id
+ cisco.dnac.device_credential_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - global_credential_details:
+ cli_credential:
+ - description: string
+ username: string
+ password: string
+ enable_password: string
+ id: string
+ snmp_v2c_read:
+ - description: string
+ read_community: string
+ id: string
+ snmp_v2c_write:
+ - description: string
+ write_community: string
+ id: string
+ snmp_v3:
+ - auth_password: string
+ auth_type: SHA
+ snmp_mode: AUTHPRIV
+ privacy_password: string
+ privacy_type: AES128
+ username: string
+ description: string
+ id: string
+ https_read:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ id: string
+ https_write:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ id: string
+
+ - name: Update multiple global device credentials using id
+ cisco.dnac.device_credential_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - global_credential_details:
+ cli_credential:
+ - description: string
+ username: string
+ password: string
+ enable_password: string
+ id: string
+ - description: string
+ username: string
+ password: string
+ enable_password: string
+ id: string
+ snmp_v2c_read:
+ - description: string
+ read_community: string
+ id: string
+ - description: string
+ read_community: string
+ id: string
+ snmp_v2c_write:
+ - description: string
+ write_community: string
+ id: string
+ - description: string
+ write_community: string
+ id: string
+ snmp_v3:
+ - auth_password: string
+ auth_type: SHA
+ snmp_mode: AUTHPRIV
+ privacy_password: string
+ privacy_type: AES128
+ username: string
+ description: string
+ id: string
+ - auth_password: string
+ auth_type: SHA
+ snmp_mode: AUTHPRIV
+ privacy_password: string
+ privacy_type: AES128
+ username: string
+ description: string
+ id: string
+ https_read:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ id: string
+ - description: string
+ username: string
+ password: string
+ port: 443
+ id: string
+ https_write:
+ - description: string
+ username: string
+ password: string
+ port: 443
+ id: string
+ - description: string
+ username: string
+ password: string
+ port: 443
+ id: string
+
+ - name: Update global device credential name/description using old name and description.
+ cisco.dnac.device_credential_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - global_credential_details:
+ cli_credential:
+ - description: string
+ username: string
+ password: string
+ enable_password: string
+ old_description: string
+ old_username: string
+ snmp_v2c_read:
+ - description: string
+ read_community: string
+ old_description: string
+ snmp_v2c_write:
+ - description: string
+ write_community: string
+ old_description: string
+ snmp_v3:
+ - auth_password: string
+ auth_type: string
+ snmp_mode: string
+ privacy_password: string
+ privacy_type: string
+ username: string
+ description: string
+ https_read:
+ - description: string
+ username: string
+ password: string
+ port: string
+ old_description: string
+ old_username: string
+ https_write:
+ - description: string
+ username: string
+ password: string
+ port: string
+ old_description: string
+ old_username: string
+
+ - name: Assign Credentials to sites using old description and username.
+ cisco.dnac.device_credential_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - assign_credentials_to_site:
+ cli_credential:
+ description: string
+ username: string
+ snmp_v2c_read:
+ description: string
+ snmp_v2c_write:
+ description: string
+ snmp_v3:
+ description: string
+ https_read:
+ description: string
+ username: string
+ https_write:
+ description: string
+ username: string
+ site_name:
+ - string
+ - string
+
+"""
+
+RETURN = r"""
+# Case_1: Successful creation/updation/deletion of global device credentials
+dnac_response1:
+ description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+
+# Case_2: Successful assignment of global device credentials to a site.
+dnac_response2:
+ description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
+
+import copy
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts,
+ get_dict_result,
+)
+
+
+class DeviceCredential(DnacBase):
+ """Class containing member attributes for device_credential_workflow_manager module"""
+
+ def __init__(self, module):
+ super().__init__(module)
+ self.result["response"] = [
+ {
+ "globalCredential": {},
+ "assignCredential": {}
+ }
+ ]
+
+ def validate_input(self):
+ """
+ Validate the fields provided in the playbook.
+ Checks the configuration provided in the playbook against a predefined specification
+ to ensure it adheres to the expected structure and data types.
+ Parameters:
+ self: The instance of the class containing the 'config' attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call 'validate_input' on it.
+ If the validation succeeds, 'self.status' will be 'success' and 'self.validated_config'
+ will contain the validated configuration. If it fails, 'self.status' will be 'failed',
+ 'self.msg' will describe the validation issues.
+
+ """
+
+ if not self.config:
+ self.msg = "config not available in playbook for validation"
+ self.status = "success"
+ return self
+
+ # temp_spec is the specification for the expected structure of configuration parameters
+ temp_spec = {
+ "global_credential_details": {
+ "type": 'dict',
+ "cli_credential": {
+ "type": 'list',
+ "description": {"type": 'string'},
+ "username": {"type": 'string'},
+ "password": {"type": 'string'},
+ "enable_password": {"type": 'string'},
+ "old_description": {"type": 'string'},
+ "old_username": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "snmp_v2c_read": {
+ "type": 'list',
+ "description": {"type": 'string'},
+ "read_community": {"type": 'string'},
+ "old_description": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "snmp_v2c_write": {
+ "type": 'list',
+ "description": {"type": 'string'},
+ "write_community": {"type": 'string'},
+ "old_description": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "snmp_v3": {
+ "type": 'list',
+ "description": {"type": 'string'},
+ "username": {"type": 'string'},
+ "snmp_mode": {"type": 'string'},
+ "auth_type": {"type": 'string'},
+ "auth_password": {"type": 'string'},
+ "privacy_type": {"type": 'string'},
+ "privacy_password": {"type": 'string'},
+ "old_description": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "https_read": {
+ "type": 'list',
+ "description": {"type": 'string'},
+ "username": {"type": 'string'},
+ "password": {"type": 'string'},
+ "port": {"type": 'integer'},
+ "old_description": {"type": 'string'},
+ "old_username": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "https_write": {
+ "type": 'list',
+ "description": {"type": 'string'},
+ "username": {"type": 'string'},
+ "password": {"type": 'string'},
+ "port": {"type": 'integer'},
+ "old_description": {"type": 'string'},
+ "old_username": {"type": 'string'},
+ "id": {"type": 'string'},
+ }
+ },
+ "assign_credentials_to_site": {
+ "type": 'dict',
+ "cli_credential": {
+ "type": 'dict',
+ "description": {"type: 'string'"},
+ "username": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "snmp_v2c_read": {
+ "type": 'dict',
+ "description": {"type: 'string'"},
+ "username": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "snmp_v2c_write": {
+ "type": 'dict',
+ "description": {"type: 'string'"},
+ "id": {"type": 'string'},
+ },
+ "snmp_v3": {
+ "type": 'dict',
+ "description": {"type: 'string'"},
+ "id": {"type": 'string'},
+ },
+ "https_read": {
+ "type": 'dict',
+ "description": {"type: 'string'"},
+ "username": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "https_write": {
+ "type": 'dict',
+ "description": {"type: 'string'"},
+ "username": {"type": 'string'},
+ "id": {"type": 'string'},
+ },
+ "site_name": {
+ "type": 'list',
+ "elements": 'string'
+ }
+ }
+ }
+
+ # Validate playbook params against the specification (temp_spec)
+ valid_temp, invalid_params = validate_list_of_dicts(self.config, temp_spec)
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format("\n".join(invalid_params))
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_temp
+ self.log("Successfully validated playbook config params: {0}".format(valid_temp), "INFO")
+ self.msg = "Successfully validated input from the playbook"
+ self.status = "success"
+ return self
+
+ def get_site_id(self, site_name):
+ """
+ Get the site id from the site name.
+ Use check_return_status() to check for failure
+
+ Parameters:
+ site_name (str) - Site name
+
+ Returns:
+ str or None - The Site Id if found, or None if not found or error
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="sites",
+ function='get_site',
+ params={"name": site_name},
+ )
+ self.log("Received API response from 'get_site': {0}".format(response), "DEBUG")
+ if not response:
+ self.log("Failed to retrieve the site ID for the site name: {0}"
+ .format(site_name), "ERROR")
+ return None
+
+ _id = response.get("response")[0].get("id")
+ self.log("Site ID for the site name {0}: {1}".format(site_name, _id), "INFO")
+ except Exception as exec:
+ self.log("Exception occurred while getting site_id from the site_name: {0}"
+ .format(exec), "CRITICAL")
+ return None
+
+ return _id
+
+ def get_global_credentials_params(self):
+ """
+ Get the current Global Device Credentials from Cisco Catalyst Center.
+
+ Parameters:
+ self - The current object details.
+
+ Returns:
+ global_credentials (dict) - All global device credentials details.
+ """
+
+ try:
+ global_credentials = self.dnac._exec(
+ family="discovery",
+ function='get_all_global_credentials_v2',
+ )
+ global_credentials = global_credentials.get("response")
+ self.log("All global device credentials details: {0}"
+ .format(global_credentials), "DEBUG")
+ except Exception as exec:
+ self.log("Exception occurred while getting global device credentials: {0}"
+ .format(exec), "CRITICAL")
+ return None
+
+ return global_credentials
+
+ def get_cli_params(self, cliDetails):
+ """
+ Format the CLI parameters for the CLI credential configuration in Cisco Catalyst Center.
+
+ Parameters:
+ cliDetails (list of dict) - Cisco Catalyst Center details containing CLI Credentials.
+
+ Returns:
+ cliCredential (list of dict) - Processed CLI credential data
+ in the format suitable for the Cisco Catalyst Center config.
+ """
+
+ cliCredential = []
+ for item in cliDetails:
+ if item is None:
+ cliCredential.append(None)
+ else:
+ value = {
+ "username": item.get("username"),
+ "description": item.get("description"),
+ "id": item.get("id")
+ }
+ cliCredential.append(value)
+ return cliCredential
+
+ def get_snmpV2cRead_params(self, snmpV2cReadDetails):
+ """
+ Format the snmpV2cRead parameters for the snmpV2cRead
+ credential configuration in Cisco Catalyst Center.
+
+ Parameters:
+ snmpV2cReadDetails (list of dict) - Cisco Catalyst Center
+ Details containing snmpV2cRead Credentials.
+
+ Returns:
+ snmpV2cRead (list of dict) - Processed snmpV2cRead credential
+ data in the format suitable for the Cisco Catalyst Center config.
+ """
+
+ snmpV2cRead = []
+ for item in snmpV2cReadDetails:
+ if item is None:
+ snmpV2cRead.append(None)
+ else:
+ value = {
+ "description": item.get("description"),
+ "id": item.get("id")
+ }
+ snmpV2cRead.append(value)
+ return snmpV2cRead
+
+ def get_snmpV2cWrite_params(self, snmpV2cWriteDetails):
+ """
+ Format the snmpV2cWrite parameters for the snmpV2cWrite
+ credential configuration in Cisco Catalyst Center.
+
+ Parameters:
+ snmpV2cWriteDetails (list of dict) - Cisco Catalyst Center
+ Details containing snmpV2cWrite Credentials.
+
+ Returns:
+ snmpV2cWrite (list of dict) - Processed snmpV2cWrite credential
+ data in the format suitable for the Cisco Catalyst Center config.
+ """
+
+ snmpV2cWrite = []
+ for item in snmpV2cWriteDetails:
+ if item is None:
+ snmpV2cWrite.append(None)
+ else:
+ value = {
+ "description": item.get("description"),
+ "id": item.get("id")
+ }
+ snmpV2cWrite.append(value)
+ return snmpV2cWrite
+
+ def get_httpsRead_params(self, httpsReadDetails):
+ """
+ Format the httpsRead parameters for the httpsRead
+ credential configuration in Cisco Catalyst Center.
+
+ Parameters:
+ httpsReadDetails (list of dict) - Cisco Catalyst Center
+ Details containing httpsRead Credentials.
+
+ Returns:
+ httpsRead (list of dict) - Processed httpsRead credential
+ data in the format suitable for the Cisco Catalyst Center config.
+ """
+
+ httpsRead = []
+ for item in httpsReadDetails:
+ if item is None:
+ httpsRead.append(None)
+ else:
+ value = {
+ "description": item.get("description"),
+ "username": item.get("username"),
+ "port": item.get("port"),
+ "id": item.get("id")
+ }
+ httpsRead.append(value)
+ return httpsRead
+
+ def get_httpsWrite_params(self, httpsWriteDetails):
+ """
+ Format the httpsWrite parameters for the httpsWrite
+ credential configuration in Cisco Catalyst Center.
+
+ Parameters:
+ httpsWriteDetails (list of dict) - Cisco Catalyst Center
+ Details containing httpsWrite Credentials.
+
+ Returns:
+ httpsWrite (list of dict) - Processed httpsWrite credential
+ data in the format suitable for the Cisco Catalyst Center config.
+ """
+
+ httpsWrite = []
+ for item in httpsWriteDetails:
+ if item is None:
+ httpsWrite.append(None)
+ else:
+ value = {
+ "description": item.get("description"),
+ "username": item.get("username"),
+ "port": item.get("port"),
+ "id": item.get("id")
+ }
+ httpsWrite.append(value)
+ return httpsWrite
+
+ def get_snmpV3_params(self, snmpV3Details):
+ """
+ Format the snmpV3 parameters for the snmpV3 credential configuration in Cisco Catalyst Center.
+
+ Parameters:
+ snmpV3Details (list of dict) - Cisco Catalyst Center details containing snmpV3 Credentials.
+
+ Returns:
+ snmpV3 (list of dict) - Processed snmpV3 credential
+ data in the format suitable for the Cisco Catalyst Center config.
+ """
+
+ snmpV3 = []
+ for item in snmpV3Details:
+ if item is None:
+ snmpV3.append(None)
+ else:
+ value = {
+ "username": item.get("username"),
+ "description": item.get("description"),
+ "snmpMode": item.get("snmpMode"),
+ "id": item.get("id"),
+ }
+ if value.get("snmpMode") == "AUTHNOPRIV":
+ value["authType"] = item.get("authType")
+ elif value.get("snmpMode") == "AUTHPRIV":
+ value.update({
+ "authType": item.get("authType"),
+ "privacyType": item.get("privacyType")
+ })
+ snmpV3.append(value)
+ return snmpV3
+
    def get_cli_credentials(self, CredentialDetails, global_credentials):
        """
        Get the current CLI Credential from
        Cisco Catalyst Center based on the provided playbook details.
        Check this API using the check_return_status.

        Parameters:
            CredentialDetails (dict) - Playbook details containing Global Device Credentials.
            global_credentials (dict) - All global device credentials details.

        Returns:
            cliDetails (List) - The current CLI credentials.
            NOTE: on a lookup failure this method returns 'self' (not a list)
            with self.status set to "failed"; callers are expected to detect
            that via check_return_status.
        """

        # playbook CLI Credential details
        all_CLI = CredentialDetails.get("cli_credential")
        # All CLI details from Cisco Catalyst Center
        cli_details = global_credentials.get("cliCredential")
        # Cisco Catalyst Center details for the CLI Credential given in the playbook
        cliDetails = []
        if all_CLI and cli_details:
            for cliCredential in all_CLI:
                cliDetail = None
                # Lookup precedence: 1) id, 2) old_description/old_username,
                # 3) description/username. Unmatched entries append None.
                cliId = cliCredential.get("id")
                if cliId:
                    cliDetail = get_dict_result(cli_details, "id", cliId)
                    if not cliDetail:
                        self.msg = "CLI credential ID is invalid"
                        self.status = "failed"
                        return self

                cliOldDescription = cliCredential.get("old_description")
                cliOldUsername = cliCredential.get("old_username")
                if cliOldDescription and cliOldUsername and (not cliDetail):
                    for item in cli_details:
                        if item.get("description") == cliOldDescription \
                            and item.get("username") == cliOldUsername:
                            # A second match means the old_* pair is ambiguous.
                            if cliDetail:
                                self.msg = "More than one CLI credential with same \
                                    old_description and old_username. Pass ID."
                                self.status = "failed"
                                return self
                            cliDetail = item
                    if not cliDetail:
                        self.msg = "CLI credential old_description or old_username is invalid"
                        self.status = "failed"
                        return self

                cliDescription = cliCredential.get("description")
                cliUsername = cliCredential.get("username")
                if cliDescription and cliUsername and (not cliDetail):
                    for item in cli_details:
                        if item.get("description") == cliDescription \
                            and item.get("username") == cliUsername:
                            # Same ambiguity guard for description/username.
                            if cliDetail:
                                self.msg = "More than one CLI Credential with same \
                                    description and username. Pass ID."
                                self.status = "failed"
                                return self
                            cliDetail = item
                cliDetails.append(cliDetail)
        return cliDetails
+
+ def get_snmpV2cRead_credentials(self, CredentialDetails, global_credentials):
+ """
+ Get the current snmpV2cRead Credential from
+ Cisco Catalyst Center based on the provided playbook details.
+ Check this API using the check_return_status.
+
+ Parameters:
+ CredentialDetails (dict) - Playbook details containing Global Device Credentials.
+ global_credentials (dict) - All global device credentials details.
+
+ Returns:
+ snmpV2cReadDetails (List) - The current snmpV2cRead.
+ """
+
+ # Playbook snmpV2cRead Credential details
+ all_snmpV2cRead = CredentialDetails.get("snmp_v2c_read")
+ # All snmpV2cRead details from the Cisco Catalyst Center
+ snmpV2cRead_details = global_credentials.get("snmpV2cRead")
+ # Cisco Catalyst Center details for the snmpV2cRead Credential given in the playbook
+ snmpV2cReadDetails = []
+ if all_snmpV2cRead and snmpV2cRead_details:
+ for snmpV2cReadCredential in all_snmpV2cRead:
+ snmpV2cReadDetail = None
+ snmpV2cReadId = snmpV2cReadCredential.get("id")
+ if snmpV2cReadId:
+ snmpV2cReadDetail = get_dict_result(snmpV2cRead_details, "id", snmpV2cReadId)
+ if not snmpV2cReadDetail:
+ self.msg = "snmpV2cRead credential ID is invalid"
+ self.status = "failed"
+ return self
+
+ snmpV2cReadOldDescription = snmpV2cReadCredential.get("old_description")
+ if snmpV2cReadOldDescription and (not snmpV2cReadDetail):
+ snmpV2cReadDetail = get_dict_result(
+ snmpV2cRead_details,
+ "description",
+ snmpV2cReadOldDescription
+ )
+ if not snmpV2cReadDetail:
+ self.msg = "snmpV2cRead credential old_description is invalid"
+ self.status = "failed"
+ return self
+
+ snmpV2cReadDescription = snmpV2cReadCredential.get("description")
+ if snmpV2cReadDescription and (not snmpV2cReadDetail):
+ snmpV2cReadDetail = get_dict_result(
+ snmpV2cRead_details,
+ "description",
+ snmpV2cReadDescription
+ )
+ snmpV2cReadDetails.append(snmpV2cReadDetail)
+ return snmpV2cReadDetails
+
+ def get_snmpV2cWrite_credentials(self, CredentialDetails, global_credentials):
+ """
+ Get the current snmpV2cWrite Credential from
+ Cisco Catalyst Center based on the provided playbook details.
+ Check this API using the check_return_status.
+
+ Parameters:
+ CredentialDetails (dict) - Playbook details containing Global Device Credentials.
+ global_credentials (dict) - All global device credentials details.
+
+ Returns:
+ snmpV2cWriteDetails (List) - The current snmpV2cWrite.
+ """
+
+ # Playbook snmpV2cWrite Credential details
+ all_snmpV2cWrite = CredentialDetails.get("snmp_v2c_write")
+ # All snmpV2cWrite details from the Cisco Catalyst Center
+ snmpV2cWrite_details = global_credentials.get("snmpV2cWrite")
+ # Cisco Catalyst Center details for the snmpV2cWrite Credential given in the playbook
+ snmpV2cWriteDetails = []
+ if all_snmpV2cWrite and snmpV2cWrite_details:
+ for snmpV2cWriteCredential in all_snmpV2cWrite:
+ snmpV2cWriteDetail = None
+ snmpV2cWriteId = snmpV2cWriteCredential.get("id")
+ if snmpV2cWriteId:
+ snmpV2cWriteDetail = get_dict_result(snmpV2cWrite_details, "id", snmpV2cWriteId)
+ if not snmpV2cWriteDetail:
+ self.msg = "snmpV2cWrite credential ID is invalid"
+ self.status = "failed"
+ return self
+
+ snmpV2cWriteOldDescription = snmpV2cWriteCredential.get("old_description")
+ if snmpV2cWriteOldDescription and (not snmpV2cWriteDetail):
+ snmpV2cWriteDetail = get_dict_result(
+ snmpV2cWrite_details,
+ "description",
+ snmpV2cWriteOldDescription
+ )
+ if not snmpV2cWriteDetail:
+ self.msg = "snmpV2cWrite credential old_description is invalid "
+ self.status = "failed"
+ return self
+
+ snmpV2cWriteDescription = snmpV2cWriteCredential.get("description")
+ if snmpV2cWriteDescription and (not snmpV2cWriteDetail):
+ snmpV2cWriteDetail = get_dict_result(
+ snmpV2cWrite_details,
+ "description",
+ snmpV2cWriteDescription
+ )
+ snmpV2cWriteDetails.append(snmpV2cWriteDetail)
+ return snmpV2cWriteDetails
+
    def get_httpsRead_credentials(self, CredentialDetails, global_credentials):
        """
        Get the current httpsRead Credential from
        Cisco Catalyst Center based on the provided playbook details.
        Check this API using the check_return_status.

        Parameters:
            CredentialDetails (dict) - Playbook details containing Global Device Credentials.
            global_credentials (dict) - All global device credentials details.

        Returns:
            httpsReadDetails (List) - The current httpsRead.
            NOTE: on a lookup failure this method returns 'self' (not a list)
            with self.status set to "failed"; callers are expected to detect
            that via check_return_status.
        """

        # Playbook httpsRead Credential details
        all_httpsRead = CredentialDetails.get("https_read")
        # All httpsRead details from the Cisco Catalyst Center
        httpsRead_details = global_credentials.get("httpsRead")
        # Cisco Catalyst Center details for the httpsRead Credential given in the playbook
        httpsReadDetails = []
        if all_httpsRead and httpsRead_details:
            for httpsReadCredential in all_httpsRead:
                httpsReadDetail = None
                # Lookup precedence: 1) id, 2) old_description/old_username,
                # 3) description/username. Unmatched entries append None.
                httpsReadId = httpsReadCredential.get("id")
                if httpsReadId:
                    httpsReadDetail = get_dict_result(httpsRead_details, "id", httpsReadId)
                    if not httpsReadDetail:
                        self.msg = "httpsRead credential Id is invalid"
                        self.status = "failed"
                        return self

                httpsReadOldDescription = httpsReadCredential.get("old_description")
                httpsReadOldUsername = httpsReadCredential.get("old_username")
                if httpsReadOldDescription and httpsReadOldUsername and (not httpsReadDetail):
                    for item in httpsRead_details:
                        if item.get("description") == httpsReadOldDescription \
                            and item.get("username") == httpsReadOldUsername:
                            # A second match means the old_* pair is ambiguous.
                            if httpsReadDetail:
                                self.msg = "More than one httpsRead credential with same \
                                    old_description and old_username. Pass ID."
                                self.status = "failed"
                                return self
                            httpsReadDetail = item
                    if not httpsReadDetail:
                        self.msg = "httpsRead credential old_description or old_username is invalid"
                        self.status = "failed"
                        return self

                httpsReadDescription = httpsReadCredential.get("description")
                httpsReadUsername = httpsReadCredential.get("username")
                if httpsReadDescription and httpsReadUsername and (not httpsReadDetail):
                    for item in httpsRead_details:
                        if item.get("description") == httpsReadDescription \
                            and item.get("username") == httpsReadUsername:
                            # Same ambiguity guard for description/username.
                            if httpsReadDetail:
                                self.msg = "More than one httpsRead credential with same \
                                    description and username. Pass ID."
                                self.status = "failed"
                                return self
                            httpsReadDetail = item
                httpsReadDetails.append(httpsReadDetail)
        return httpsReadDetails
+
+ def get_httpsWrite_credentials(self, CredentialDetails, global_credentials):
+ """
+ Get the current httpsWrite Credential from
+ Cisco Catalyst Center based on the provided playbook details.
+ Check this API using the check_return_status.
+
+ Parameters:
+ CredentialDetails (dict) - Playbook details containing Global Device Credentials.
+ global_credentials (dict) - All global device credentials details.
+
+ Returns:
+ httpsWriteDetails (List) - The current httpsWrite.
+ """
+
+ # Playbook httpsWrite Credential details
+ all_httpsWrite = CredentialDetails.get("https_write")
+ # All httpsWrite details from the Cisco Catalyst Center
+ httpsWrite_details = global_credentials.get("httpsWrite")
+ # Cisco Catalyst Center details for the httpsWrite Credential given in the playbook
+ httpsWriteDetails = []
+ if all_httpsWrite and httpsWrite_details:
+ for httpsWriteCredential in all_httpsWrite:
+ httpsWriteDetail = None
+ httpsWriteId = httpsWriteCredential.get("id")
+ if httpsWriteId:
+ httpsWriteDetail = get_dict_result(httpsWrite_details, "id", httpsWriteId)
+ if not httpsWriteDetail:
+ self.msg = "httpsWrite credential Id is invalid"
+ self.status = "failed"
+ return self
+
+ httpsWriteOldDescription = httpsWriteCredential.get("old_description")
+ httpsWriteOldUsername = httpsWriteCredential.get("old_username")
+ if httpsWriteOldDescription and httpsWriteOldUsername and (not httpsWriteDetail):
+ for item in httpsWrite_details:
+ if item.get("description") == httpsWriteOldDescription \
+ and item.get("username") == httpsWriteOldUsername:
+ if httpsWriteDetail:
+ self.msg = "More than one httpsWrite credential with same \
+ old_description and old_username. Pass ID"
+ self.status = "failed"
+ return self
+ httpsWriteDetail = item
+ if not httpsWriteDetail:
+ self.msg = "httpsWrite credential old_description or \
+ old_username is invalid"
+ self.status = "failed"
+ return self
+
+ httpsWriteDescription = httpsWriteCredential.get("description")
+ httpsWriteUsername = httpsWriteCredential.get("username")
+ if httpsWriteDescription and httpsWriteUsername and (not httpsWriteDetail):
+ for item in httpsWrite_details:
+ if item.get("description") == httpsWriteDescription \
+ and item.get("username") == httpsWriteUsername:
+ httpsWriteDetail = item
+ httpsWriteDetails.append(httpsWriteDetail)
+ return httpsWriteDetails
+
+ def get_snmpV3_credentials(self, CredentialDetails, global_credentials):
+ """
+ Get the current snmpV3 Credential from
+ Cisco Catalyst Center based on the provided playbook details.
+ Check this API using the check_return_status.
+
+ Parameters:
+ CredentialDetails (dict) - Playbook details containing Global Device Credentials.
+ global_credentials (dict) - All global device credentials details.
+
+ Returns:
+ snmpV3Details (List) - The current snmpV3.
+ """
+
+ # Playbook snmpV3 Credential details
+ all_snmpV3 = CredentialDetails.get("snmp_v3")
+ # All snmpV3 details from the Cisco Catalyst Center
+ snmpV3_details = global_credentials.get("snmpV3")
+ # Cisco Catalyst Center details for the snmpV3 Credential given in the playbook
+ snmpV3Details = []
+ if all_snmpV3 and snmpV3_details:
+ for snmpV3Credential in all_snmpV3:
+ snmpV3Detail = None
+ snmpV3Id = snmpV3Credential.get("id")
+ if snmpV3Id:
+ snmpV3Detail = get_dict_result(snmpV3_details, "id", snmpV3Id)
+ if not snmpV3Detail:
+ self.msg = "snmpV3 credential id is invalid"
+ self.status = "failed"
+ return self
+
+ snmpV3OldDescription = snmpV3Credential.get("old_description")
+ if snmpV3OldDescription and (not snmpV3Detail):
+ snmpV3Detail = get_dict_result(snmpV3_details,
+ "description", snmpV3OldDescription)
+ if not snmpV3Detail:
+ self.msg = "snmpV3 credential old_description is invalid"
+ self.status = "failed"
+ return self
+
+ snmpV3Description = snmpV3Credential.get("description")
+ if snmpV3Description and (not snmpV3Detail):
+ snmpV3Detail = get_dict_result(snmpV3_details, "description", snmpV3Description)
+ snmpV3Details.append(snmpV3Detail)
+ return snmpV3Details
+
+ def get_have_device_credentials(self, CredentialDetails):
+ """
+ Get the current Global Device Credentials from
+ Cisco Catalyst Center based on the provided playbook details.
+ Check this API using the check_return_status.
+
+ Parameters:
+ CredentialDetails (dict) - Playbook details containing Global Device Credentials.
+
+ Returns:
+ self - The current object with updated information.
+ """
+
+ global_credentials = self.get_global_credentials_params()
+ cliDetails = self.get_cli_credentials(CredentialDetails, global_credentials)
+ snmpV2cReadDetails = self.get_snmpV2cRead_credentials(CredentialDetails, global_credentials)
+ snmpV2cWriteDetails = self.get_snmpV2cWrite_credentials(CredentialDetails,
+ global_credentials)
+ httpsReadDetails = self.get_httpsRead_credentials(CredentialDetails, global_credentials)
+ httpsWriteDetails = self.get_httpsWrite_credentials(CredentialDetails, global_credentials)
+ snmpV3Details = self.get_snmpV3_credentials(CredentialDetails, global_credentials)
+ self.have.update({"globalCredential": {}})
+ if cliDetails:
+ cliCredential = self.get_cli_params(cliDetails)
+ self.have.get("globalCredential").update({"cliCredential": cliCredential})
+ if snmpV2cReadDetails:
+ snmpV2cRead = self.get_snmpV2cRead_params(snmpV2cReadDetails)
+ self.have.get("globalCredential").update({"snmpV2cRead": snmpV2cRead})
+ if snmpV2cWriteDetails:
+ snmpV2cWrite = self.get_snmpV2cWrite_params(snmpV2cWriteDetails)
+ self.have.get("globalCredential").update({"snmpV2cWrite": snmpV2cWrite})
+ if httpsReadDetails:
+ httpsRead = self.get_httpsRead_params(httpsReadDetails)
+ self.have.get("globalCredential").update({"httpsRead": httpsRead})
+ if httpsWriteDetails:
+ httpsWrite = self.get_httpsWrite_params(httpsWriteDetails)
+ self.have.get("globalCredential").update({"httpsWrite": httpsWrite})
+ if snmpV3Details:
+ snmpV3 = self.get_snmpV3_params(snmpV3Details)
+ self.have.get("globalCredential").update({"snmpV3": snmpV3})
+
+ self.log("Global device credential details: {0}"
+ .format(self.have.get("globalCredential")), "DEBUG")
+ self.msg = "Collected the Global Device Credential Details from the Cisco Catalyst Center"
+ self.status = "success"
+ return self
+
+ def get_have(self, config):
+ """
+ Get the current Global Device Credentials and
+ Device Credentials assigned to a site in Cisco Catalyst Center.
+
+ Parameters:
+ config (dict) - Playbook details containing Global Device
+ Credentials configurations and Device Credentials should
+ be assigned to a site.
+
+ Returns:
+ self - The current object with updated information of Global
+ Device Credentials and Device Credentials assigned to a site.
+ """
+
+ if config.get("global_credential_details") is not None:
+ CredentialDetails = config.get("global_credential_details")
+ self.get_have_device_credentials(CredentialDetails).check_return_status()
+
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.msg = "Successfully retrieved the details from the Cisco Catalyst Center"
+ self.status = "success"
+ return self
+
+ def get_want_device_credentials(self, CredentialDetails):
+ """
+ Get the Global Device Credentials from the playbook.
+ Check this API using the check_return_status.
+
+ Parameters:
+ CredentialDetails (dict) - Playbook details containing Global Device Credentials.
+
+ Returns:
+ self - The current object with updated information of
+ Global Device Credentials from the playbook.
+ """
+
+ want = {
+ "want_create": {},
+ "want_update": {}
+ }
+ if CredentialDetails.get("cli_credential"):
+ cli = CredentialDetails.get("cli_credential")
+ have_cli_ptr = 0
+ create_cli_ptr = 0
+ update_cli_ptr = 0
+ values = ["password", "description", "username", "id"]
+ have_cliCredential = self.have.get("globalCredential").get("cliCredential")
+ for item in cli:
+ if not have_cliCredential or have_cliCredential[have_cli_ptr] is None:
+ if want.get("want_create").get("cliCredential") is None:
+ want.get("want_create").update({"cliCredential": []})
+ create_credential = want.get("want_create").get("cliCredential")
+ create_credential.append({})
+ for i in range(0, 3):
+ if item.get(values[i]):
+ create_credential[create_cli_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ self.msg = values[i] + " is mandatory for creating \
+ cliCredential " + str(have_cli_ptr)
+ self.status = "failed"
+ return self
+
+ if item.get("enable_password"):
+ create_credential[create_cli_ptr] \
+ .update({"enablePassword": item.get("enable_password")})
+ create_cli_ptr = create_cli_ptr + 1
+ else:
+ if want.get("want_update").get("cliCredential") is None:
+ want.get("want_update").update({"cliCredential": []})
+ update_credential = want.get("want_update").get("cliCredential")
+ update_credential.append({})
+ if item.get("password"):
+ update_credential[update_cli_ptr] \
+ .update({"password": item.get("password")})
+ else:
+ self.msg = "password is mandatory for udpating \
+ cliCredential " + str(have_cli_ptr)
+ self.status = "failed"
+ return self
+
+ for i in range(1, 4):
+ if item.get(values[i]):
+ update_credential[update_cli_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ update_credential[update_cli_ptr].update({
+ values[i]: self.have.get("globalCredential")
+ .get("cliCredential")[have_cli_ptr].get(values[i])
+ })
+
+ if item.get("enable_password"):
+ update_credential[update_cli_ptr].update({
+ "enablePassword": item.get("enable_password")
+ })
+ update_cli_ptr = update_cli_ptr + 1
+ have_cli_ptr = have_cli_ptr + 1
+
+ if CredentialDetails.get("snmp_v2c_read"):
+ snmpV2cRead = CredentialDetails.get("snmp_v2c_read")
+ have_snmpv2cread_ptr = 0
+ create_snmpv2cread_ptr = 0
+ update_snmpv2cread_ptr = 0
+ values = ["read_community", "description", "id"]
+ keys = ["readCommunity", "description", "id"]
+ have_snmpV2cRead = self.have.get("globalCredential").get("snmpV2cRead")
+ for item in snmpV2cRead:
+ if not have_snmpV2cRead or have_snmpV2cRead[have_snmpv2cread_ptr] is None:
+ if want.get("want_create").get("snmpV2cRead") is None:
+ want.get("want_create").update({"snmpV2cRead": []})
+ create_credential = want.get("want_create").get("snmpV2cRead")
+ create_credential.append({})
+ for i in range(0, 2):
+ if item.get(values[i]):
+ create_credential[create_snmpv2cread_ptr] \
+ .update({keys[i]: item.get(values[i])})
+ else:
+ self.msg = values[i] + " is mandatory for creating \
+ snmpV2cRead " + str(have_snmpv2cread_ptr)
+ self.status = "failed"
+ return self
+ create_snmpv2cread_ptr = create_snmpv2cread_ptr + 1
+ else:
+ if want.get("want_update").get("snmpV2cRead") is None:
+ want.get("want_update").update({"snmpV2cRead": []})
+ update_credential = want.get("want_update").get("snmpV2cRead")
+ update_credential.append({})
+ if item.get("read_community"):
+ update_credential[update_snmpv2cread_ptr] \
+ .update({"readCommunity": item.get("read_community")})
+ else:
+ self.msg = "read_community is mandatory for updating \
+ snmpV2cRead " + str(have_snmpv2cread_ptr)
+ self.status = "failed"
+ return self
+ for i in range(1, 3):
+ if item.get(values[i]):
+ update_credential[update_snmpv2cread_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ update_credential[update_snmpv2cread_ptr].update({
+ values[i]: self.have.get("globalCredential")
+ .get("snmpV2cRead")[have_snmpv2cread_ptr].get(values[i])
+ })
+ update_snmpv2cread_ptr = update_snmpv2cread_ptr + 1
+ have_snmpv2cread_ptr = have_snmpv2cread_ptr + 1
+
+ if CredentialDetails.get("snmp_v2c_write"):
+ snmpV2cWrite = CredentialDetails.get("snmp_v2c_write")
+ have_snmpv2cwrite_ptr = 0
+ create_snmpv2cwrite_ptr = 0
+ update_snmpv2cwrite_ptr = 0
+ values = ["write_community", "description", "id"]
+ keys = ["writeCommunity", "description", "id"]
+ have_snmpV2cWrite = self.have.get("globalCredential").get("snmpV2cWrite")
+ for item in snmpV2cWrite:
+ if not have_snmpV2cWrite or have_snmpV2cWrite[have_snmpv2cwrite_ptr] is None:
+ if want.get("want_create").get("snmpV2cWrite") is None:
+ want.get("want_create").update({"snmpV2cWrite": []})
+ create_credential = want.get("want_create").get("snmpV2cWrite")
+ create_credential.append({})
+ for i in range(0, 2):
+ if item.get(values[i]):
+ create_credential[create_snmpv2cwrite_ptr] \
+ .update({keys[i]: item.get(values[i])})
+ else:
+ self.msg = values[i] + " is mandatory for creating \
+ snmpV2cWrite " + str(have_snmpv2cwrite_ptr)
+ self.status = "failed"
+ return self
+ create_snmpv2cwrite_ptr = create_snmpv2cwrite_ptr + 1
+ else:
+ if want.get("want_update").get("snmpV2cWrite") is None:
+ want.get("want_update").update({"snmpV2cWrite": []})
+ update_credential = want.get("want_update").get("snmpV2cWrite")
+ update_credential.append({})
+ if item.get("write_community"):
+ update_credential[update_snmpv2cwrite_ptr] \
+ .update({"writeCommunity": item.get("write_community")})
+ else:
+ self.msg = "write_community is mandatory for updating \
+ snmpV2cWrite " + str(have_snmpv2cwrite_ptr)
+ self.status = "failed"
+ return self
+ for i in range(1, 3):
+ if item.get(values[i]):
+ update_credential[update_snmpv2cwrite_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ update_credential[update_snmpv2cwrite_ptr].update({
+ values[i]: self.have.get("globalCredential")
+ .get("snmpV2cWrite")[have_snmpv2cwrite_ptr].get(values[i])
+ })
+ update_snmpv2cwrite_ptr = update_snmpv2cwrite_ptr + 1
+ have_snmpv2cwrite_ptr = have_snmpv2cwrite_ptr + 1
+
+ if CredentialDetails.get("https_read"):
+ httpsRead = CredentialDetails.get("https_read")
+ have_httpsread_ptr = 0
+ create_httpsread_ptr = 0
+ update_httpsread_ptr = 0
+ values = ["password", "description", "username", "id", "port"]
+ have_httpsRead = self.have.get("globalCredential").get("httpsRead")
+ for item in httpsRead:
+ self.log("Global credentials details: {0}"
+ .format(self.have.get("globalCredential")), "DEBUG")
+ if not have_httpsRead or have_httpsRead[have_httpsread_ptr] is None:
+ if want.get("want_create").get("httpsRead") is None:
+ want.get("want_create").update({"httpsRead": []})
+ create_credential = want.get("want_create").get("httpsRead")
+ create_credential.append({})
+ for i in range(0, 3):
+ if item.get(values[i]):
+ create_credential[create_httpsread_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ self.msg = values[i] + " is mandatory for creating \
+ httpsRead " + str(have_httpsread_ptr)
+ self.status = "failed"
+ return self
+ if item.get("port"):
+ create_credential[create_httpsread_ptr] \
+ .update({"port": item.get("port")})
+ else:
+ create_credential[create_httpsread_ptr] \
+ .update({"port": "443"})
+ create_httpsread_ptr = create_httpsread_ptr + 1
+ else:
+ if want.get("want_update").get("httpsRead") is None:
+ want.get("want_update").update({"httpsRead": []})
+ update_credential = want.get("want_update").get("httpsRead")
+ update_credential.append({})
+ if item.get("password"):
+ update_credential[update_httpsread_ptr] \
+ .update({"password": item.get("password")})
+ else:
+ self.msg = "password is mandatory for updating \
+ httpsRead " + str(have_httpsread_ptr)
+ self.status = "failed"
+ return self
+ for i in range(1, 5):
+ if item.get(values[i]):
+ update_credential[update_httpsread_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ update_credential[update_httpsread_ptr].update({
+ values[i]: self.have.get("globalCredential")
+ .get("httpsRead")[have_httpsread_ptr].get(values[i])
+ })
+ update_httpsread_ptr = update_httpsread_ptr + 1
+ have_httpsread_ptr = have_httpsread_ptr + 1
+
+ if CredentialDetails.get("https_write"):
+ httpsWrite = CredentialDetails.get("https_write")
+ have_httpswrite_ptr = 0
+ create_httpswrite_ptr = 0
+ update_httpswrite_ptr = 0
+ values = ["password", "description", "username", "id", "port"]
+ have_httpsWrite = self.have.get("globalCredential").get("httpsWrite")
+ for item in httpsWrite:
+ if not have_httpsWrite or have_httpsWrite[have_httpswrite_ptr] is None:
+ if want.get("want_create").get("httpsWrite") is None:
+ want.get("want_create").update({"httpsWrite": []})
+ create_credential = want.get("want_create").get("httpsWrite")
+ create_credential.append({})
+ for i in range(0, 3):
+ if item.get(values[i]):
+ create_credential[create_httpswrite_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ self.msg = values[i] + " is mandatory for creating \
+ httpsWrite " + str(have_httpswrite_ptr)
+ self.status = "failed"
+ return self
+ if item.get("port"):
+ create_credential[create_httpswrite_ptr] \
+ .update({"port": item.get("port")})
+ else:
+ create_credential[create_httpswrite_ptr] \
+ .update({"port": "443"})
+ create_httpswrite_ptr = create_httpswrite_ptr + 1
+ else:
+ if want.get("want_update").get("httpsWrite") is None:
+ want.get("want_update").update({"httpsWrite": []})
+ update_credential = want.get("want_update").get("httpsWrite")
+ update_credential.append({})
+ if item.get("password"):
+ update_credential[update_httpswrite_ptr] \
+ .update({"password": item.get("password")})
+ else:
+ self.msg = "password is mandatory for updating \
+ httpsRead " + str(have_httpswrite_ptr)
+ self.status = "failed"
+ return self
+ for i in range(1, 5):
+ if item.get(values[i]):
+ update_credential[update_httpswrite_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ update_credential[update_httpswrite_ptr].update({
+ values[i]: self.have.get("globalCredential")
+ .get("httpsWrite")[have_httpswrite_ptr].get(values[i])
+ })
+ update_httpswrite_ptr = update_httpswrite_ptr + 1
+ have_httpswrite_ptr = have_httpswrite_ptr + 1
+
+ if CredentialDetails.get("snmp_v3"):
+ snmpV3 = CredentialDetails.get("snmp_v3")
+ have_snmpv3_ptr = 0
+ create_snmpv3_ptr = 0
+ update_snmpv3_ptr = 0
+ values = ["description", "username", "id"]
+ have_snmpV3 = self.have.get("globalCredential").get("snmpV3")
+ for item in snmpV3:
+ if not have_snmpV3 or have_snmpV3[have_snmpv3_ptr] is None:
+ if want.get("want_create").get("snmpV3") is None:
+ want.get("want_create").update({"snmpV3": []})
+ create_credential = want.get("want_create").get("snmpV3")
+ create_credential.append({})
+ for i in range(0, 2):
+ if item.get(values[i]):
+ create_credential[create_snmpv3_ptr] \
+ .update({values[i]: item.get(values[i])})
+ else:
+ self.msg = values[i] + " is mandatory for creating \
+ snmpV3 " + str(have_snmpv3_ptr)
+ self.status = "failed"
+ return self
+ if item.get("snmp_mode"):
+ create_credential[create_snmpv3_ptr] \
+ .update({"snmpMode": item.get("snmp_mode")})
+ else:
+ create_credential[create_snmpv3_ptr] \
+ .update({"snmpMode": "AUTHPRIV"})
+ if create_credential[create_snmpv3_ptr].get("snmpMode") == "AUTHNOPRIV" or \
+ create_credential[create_snmpv3_ptr].get("snmpMode") == "AUTHPRIV":
+ auths = ["auth_password", "auth_type"]
+ keys = {
+ "auth_password": "authPassword",
+ "auth_type": "authType"
+ }
+ for auth in auths:
+ if item.get(auth):
+ create_credential[create_snmpv3_ptr] \
+ .update({keys[auth]: item.get(auth)})
+ else:
+ self.msg = auth + " is mandatory for creating \
+ snmpV3 " + str(have_snmpv3_ptr)
+ self.status = "failed"
+ return self
+ if len(item.get("auth_password")) < 8:
+ self.msg = "auth_password length should be greater than 8"
+ self.status = "failed"
+ return self
+ self.log("snmp_mode: {0}".format(create_credential[create_snmpv3_ptr]
+ .get("snmpMode")), "DEBUG")
+ if create_credential[create_snmpv3_ptr].get("snmpMode") == "AUTHPRIV":
+ privs = ["privacy_password", "privacy_type"]
+ key = {
+ "privacy_password": "privacyPassword",
+ "privacy_type": "privacyType"
+ }
+ for priv in privs:
+ if item.get(priv):
+ create_credential[create_snmpv3_ptr] \
+ .update({key[priv]: item.get(priv)})
+ else:
+ self.msg = priv + " is mandatory for creating \
+ snmpV3 " + str(have_snmpv3_ptr)
+ self.status = "failed"
+ return self
+ if len(item.get("privacy_password")) < 8:
+ self.msg = "privacy_password should be greater than 8"
+ self.status = "failed"
+ return self
+ elif create_credential[create_snmpv3_ptr].get("snmpMode") != "NOAUTHNOPRIV":
+ self.msg = "snmp_mode in snmpV3 is not \
+ ['AUTHPRIV', 'AUTHNOPRIV', 'NOAUTHNOPRIV']"
+ self.status = "failed"
+ return self
+ create_snmpv3_ptr = create_snmpv3_ptr + 1
+ else:
+ if want.get("want_update").get("snmpV3") is None:
+ want.get("want_update").update({"snmpV3": []})
+ update_credential = want.get("want_update").get("snmpV3")
+ update_credential.append({})
+ for value in values:
+ if item.get(value):
+ update_credential[update_snmpv3_ptr] \
+ .update({value: item.get(value)})
+ else:
+ update_credential[update_snmpv3_ptr].update({
+ value: self.have.get("globalCredential")
+ .get("snmpV3")[have_snmpv3_ptr].get(value)
+ })
+ if item.get("snmp_mode"):
+ update_credential[update_snmpv3_ptr] \
+ .update({"snmpMode": item.get("snmp_mode")})
+ if update_credential[update_snmpv3_ptr].get("snmpMode") == "AUTHNOPRIV" or \
+ update_credential[update_snmpv3_ptr].get("snmpMode") == "AUTHPRIV":
+ if item.get("auth_type"):
+ update_credential[update_snmpv3_ptr] \
+ .update({"authType": item.get("auth_type")})
+ elif self.have.get("globalCredential") \
+ .get("snmpMode")[have_snmpv3_ptr].get("authType"):
+ update_credential[update_snmpv3_ptr].update({
+ "authType": self.have.get("globalCredential")
+ .get("snmpMode")[have_snmpv3_ptr].get("authType")
+ })
+ else:
+ self.msg = "auth_type is required for updating snmpV3 " + \
+ str(have_snmpv3_ptr)
+ self.status = "failed"
+ return self
+ if item.get("auth_password"):
+ update_credential[update_snmpv3_ptr] \
+ .update({"authPassword": item.get("auth_password")})
+ else:
+ self.msg = "auth_password is required for updating snmpV3 " + \
+ str(have_snmpv3_ptr)
+ self.status = "failed"
+ return self
+ if len(item.get("auth_password")) < 8:
+ self.msg = "auth_password length should be greater than 8"
+ self.status = "failed"
+ return self
+ elif update_credential[update_snmpv3_ptr].get("snmpMode") == "AUTHPRIV":
+ if item.get("privacy_type"):
+ update_credential[update_snmpv3_ptr] \
+ .update({"privacyType": item.get("privacy_type")})
+ elif self.have.get("globalCredential") \
+ .get("snmpMode")[have_snmpv3_ptr].get("privacyType"):
+ update_credential[update_snmpv3_ptr].update({
+ "privacyType": self.have.get("globalCredential")
+ .get("snmpMode")[have_snmpv3_ptr].get("privacyType")
+ })
+ else:
+ self.msg = "privacy_type is required for updating snmpV3 " + \
+ str(have_snmpv3_ptr)
+ self.status = "failed"
+ return self
+ if item.get("privacy_password"):
+ update_credential[update_snmpv3_ptr] \
+ .update({"privacyPassword": item.get("privacy_password")})
+ else:
+ self.msg = "privacy_password is required for updating snmpV3 " + \
+ str(have_snmpv3_ptr)
+ self.status = "failed"
+ return self
+ if len(item.get("privacy_password")) < 8:
+ self.msg = "privacy_password length should be greater than 8"
+ self.status = "failed"
+ return self
+ update_snmpv3_ptr = update_snmpv3_ptr + 1
+ have_snmpv3_ptr = have_snmpv3_ptr + 1
+ self.want.update(want)
+ self.msg = "Collected the Global Credentials from the Cisco Catalyst Center"
+ self.status = "success"
+ return self
+
+ def get_want_assign_credentials(self, AssignCredentials):
+ """
+ Get the Credentials to be assigned to a site from the playbook.
+ Check this API using the check_return_status.
+
+ Parameters:
+ AssignCredentials (dict) - Playbook details containing
+ credentials that need to be assigned to a site.
+
+ Returns:
+ self - The current object with updated information of credentials
+ that need to be assigned to a site from the playbook.
+ """
+ want = {
+ "assign_credentials": {}
+ }
+ site_name = AssignCredentials.get("site_name")
+ if not site_name:
+ self.msg = "site_name is required for AssignCredentials"
+ self.status = "failed"
+ return self
+ site_id = []
+ for site_name in site_name:
+ siteId = self.get_site_id(site_name)
+ if not site_name:
+ self.msg = "site_name is invalid in AssignCredentials"
+ self.status = "failed"
+ return self
+ site_id.append(siteId)
+ want.update({"site_id": site_id})
+ global_credentials = self.get_global_credentials_params()
+ cli_credential = AssignCredentials.get("cli_credential")
+ if cli_credential:
+ cliId = cli_credential.get("id")
+ cliDescription = cli_credential.get("description")
+ cliUsername = cli_credential.get("username")
+
+ if cliId or cliDescription and cliUsername:
+ # All CLI details from the Cisco Catalyst Center
+ cli_details = global_credentials.get("cliCredential")
+ if not cli_details:
+ self.msg = "Global CLI credential is not available"
+ self.status = "failed"
+ return self
+ cliDetail = None
+ if cliId:
+ cliDetail = get_dict_result(cli_details, "id", cliId)
+ if not cliDetail:
+ self.msg = "The ID for the CLI credential is not valid."
+ self.status = "failed"
+ return self
+ elif cliDescription and cliUsername:
+ for item in cli_details:
+ if item.get("description") == cliDescription and \
+ item.get("username") == cliUsername:
+ cliDetail = item
+ if not cliDetail:
+ self.msg = "The username and description of the CLI credential are invalid"
+ self.status = "failed"
+ return self
+ want.get("assign_credentials").update({"cliId": cliDetail.get("id")})
+
+ snmp_v2c_read = AssignCredentials.get("snmp_v2c_read")
+ if snmp_v2c_read:
+ snmpV2cReadId = snmp_v2c_read.get("id")
+ snmpV2cReadDescription = snmp_v2c_read.get("description")
+ if snmpV2cReadId or snmpV2cReadDescription:
+
+ # All snmpV2cRead details from the Cisco Catalyst Center
+ snmpV2cRead_details = global_credentials.get("snmpV2cRead")
+ if not snmpV2cRead_details:
+ self.msg = "Global snmpV2cRead credential is not available"
+ self.status = "failed"
+ return self
+ snmpV2cReadDetail = None
+ if snmpV2cReadId:
+ snmpV2cReadDetail = get_dict_result(snmpV2cRead_details, "id", snmpV2cReadId)
+ if not snmpV2cReadDetail:
+ self.msg = "The ID of the snmpV2cRead credential is not valid."
+ self.status = "failed"
+ return self
+ elif snmpV2cReadDescription:
+ for item in snmpV2cRead_details:
+ if item.get("description") == snmpV2cReadDescription:
+ snmpV2cReadDetail = item
+ if not snmpV2cReadDetail:
+ self.msg = "The username and description for the snmpV2cRead credential are invalid."
+ self.status = "failed"
+ return self
+ want.get("assign_credentials").update({"snmpV2ReadId": snmpV2cReadDetail.get("id")})
+
+ snmp_v2c_write = AssignCredentials.get("snmp_v2c_write")
+ if snmp_v2c_write:
+ snmpV2cWriteId = snmp_v2c_write.get("id")
+ snmpV2cWriteDescription = snmp_v2c_write.get("description")
+ if snmpV2cWriteId or snmpV2cWriteDescription:
+
+ # All snmpV2cWrite details from the Cisco Catalyst Center
+ snmpV2cWrite_details = global_credentials.get("snmpV2cWrite")
+ if not snmpV2cWrite_details:
+ self.msg = "Global snmpV2cWrite Credential is not available"
+ self.status = "failed"
+ return self
+ snmpV2cWriteDetail = None
+ if snmpV2cWriteId:
+ snmpV2cWriteDetail = get_dict_result(snmpV2cWrite_details, "id", snmpV2cWriteId)
+ if not snmpV2cWriteDetail:
+ self.msg = "The ID of the snmpV2cWrite credential is invalid."
+ self.status = "failed"
+ return self
+ elif snmpV2cWriteDescription:
+ for item in snmpV2cWrite_details:
+ if item.get("description") == snmpV2cWriteDescription:
+ snmpV2cWriteDetail = item
+ if not snmpV2cWriteDetail:
+ self.msg = "The username and description of the snmpV2cWrite credential are invalid."
+ self.status = "failed"
+ return self
+ want.get("assign_credentials").update({"snmpV2WriteId": snmpV2cWriteDetail.get("id")})
+
+ https_read = AssignCredentials.get("https_read")
+ if https_read:
+ httpReadId = https_read.get("id")
+ httpReadDescription = https_read.get("description")
+ httpReadUsername = https_read.get("username")
+ if httpReadId or httpReadDescription and httpReadUsername:
+
+ # All httpRead details from the Cisco Catalyst Center
+ httpRead_details = global_credentials.get("httpsRead")
+ if not httpRead_details:
+ self.msg = "Global httpRead Credential is not available."
+ self.status = "failed"
+ return self
+ httpReadDetail = None
+ if httpReadId:
+ httpReadDetail = get_dict_result(httpRead_details, "id", httpReadId)
+ if not httpReadDetail:
+ self.msg = "The ID of the httpRead credential is not valid."
+ self.status = "failed"
+ return self
+ elif httpReadDescription and httpReadUsername:
+ for item in httpRead_details:
+ if item.get("description") == httpReadDescription and \
+ item.get("username") == httpReadUsername:
+ httpReadDetail = item
+ if not httpReadDetail:
+ self.msg = "The description and username for the httpRead credential are invalid."
+ self.status = "failed"
+ return self
+ want.get("assign_credentials").update({"httpRead": httpReadDetail.get("id")})
+
+ https_write = AssignCredentials.get("https_write")
+ if https_write:
+ httpWriteId = https_write.get("id")
+ httpWriteDescription = https_write.get("description")
+ httpWriteUsername = https_write.get("username")
+ if httpWriteId or httpWriteDescription and httpWriteUsername:
+
+ # All httpWrite details from the Cisco Catalyst Center
+ httpWrite_details = global_credentials.get("httpsWrite")
+ if not httpWrite_details:
+ self.msg = "Global httpWrite credential is not available."
+ self.status = "failed"
+ return self
+ httpWriteDetail = None
+ if httpWriteId:
+ httpWriteDetail = get_dict_result(httpWrite_details, "id", httpWriteId)
+ if not httpWriteDetail:
+ self.msg = "The ID of the httpWrite credential is not valid."
+ self.status = "failed"
+ return self
+ elif httpWriteDescription and httpWriteUsername:
+ for item in httpWrite_details:
+ if item.get("description") == httpWriteDescription and \
+ item.get("username") == httpWriteUsername:
+ httpWriteDetail = item
+ if not httpWriteDetail:
+ self.msg = "The description and username for the httpWrite credential are invalid."
+ self.status = "failed"
+ return self
+ want.get("assign_credentials").update({"httpWrite": httpWriteDetail.get("id")})
+
+ snmp_v3 = AssignCredentials.get("snmp_v3")
+ if snmp_v3:
+ snmpV3Id = snmp_v3.get("id")
+ snmpV3Description = snmp_v3.get("description")
+ if snmpV3Id or snmpV3Description:
+
+ # All snmpV3 details from the Cisco Catalyst Center
+ snmpV3_details = global_credentials.get("snmpV3")
+ if not snmpV3_details:
+ self.msg = "Global snmpV3 Credential is not available."
+ self.status = "failed"
+ return self
+ snmpV3Detail = None
+ if snmpV3Id:
+ snmpV3Detail = get_dict_result(snmpV3_details, "id", snmpV3Id)
+ if not snmpV3Detail:
+ self.msg = "The ID of the snmpV3 credential is not valid."
+ self.status = "failed"
+ return self
+ elif snmpV3Description:
+ for item in snmpV3_details:
+ if item.get("description") == snmpV3Description:
+ snmpV3Detail = item
+ if not snmpV3Detail:
+ self.msg = "The username and description for the snmpV2cWrite credential are invalid."
+ self.status = "failed"
+ return self
+ want.get("assign_credentials").update({"snmpV3Id": snmpV3Detail.get("id")})
+ self.log("Desired State (want): {0}".format(want), "INFO")
+ self.want.update(want)
+ self.msg = "Collected the Credentials needed to be assigned from the Cisco Catalyst Center"
+ self.status = "success"
+ return self
+
+ def get_want(self, config):
+ """
+ Get the current Global Device Credentials and Device
+ Credentials assigned to a site form the playbook.
+
+ Parameters:
+ config (dict) - Playbook details containing Global Device
+ Credentials configurations and Device Credentials should
+ be assigned to a site.
+
+ Returns:
+ self - The current object with updated information of Global
+ Device Credentials and Device Credentials assigned to a site.
+ """
+
+ if config.get("global_credential_details"):
+ CredentialDetails = config.get("global_credential_details")
+ self.get_want_device_credentials(CredentialDetails).check_return_status()
+
+ if config.get("assign_credentials_to_site"):
+ AssignCredentials = config.get("assign_credentials_to_site")
+ self.get_want_assign_credentials(AssignCredentials).check_return_status()
+
+ self.log("Desired State (want): {0}".format(self.want), "INFO")
+ self.msg = "Successfully retrieved details from the playbook"
+ self.status = "success"
+ return self
+
+ def create_device_credentials(self):
+ """
+ Create Global Device Credential to the Cisco Catalyst
+ Center based on the provided playbook details.
+ Check the return value of the API with check_return_status().
+
+ Parameters:
+ self
+
+ Returns:
+ self
+ """
+
+ result_global_credential = self.result.get("response")[0].get("globalCredential")
+ want_create = self.want.get("want_create")
+ if not want_create:
+ result_global_credential.update({
+ "No Creation": {
+ "response": "No Response",
+ "msg": "No Creation is available"
+ }
+ })
+ return self
+
+ credential_params = want_create
+ self.log("Creating global credential API input parameters: {0}"
+ .format(credential_params), "DEBUG")
+ response = self.dnac._exec(
+ family="discovery",
+ function='create_global_credentials_v2',
+ params=credential_params,
+ )
+ self.log("Received API response from 'create_global_credentials_v2': {0}"
+ .format(response), "DEBUG")
+ validation_string = "global credential addition performed"
+ self.check_task_response_status(response, validation_string).check_return_status()
+ self.log("Global credential created successfully", "INFO")
+ result_global_credential.update({
+ "Creation": {
+ "response": credential_params,
+ "msg": "Global Credential Created Successfully"
+ }
+ })
+ self.msg = "Global Device Credential Created Successfully"
+ self.status = "success"
+ return self
+
+ def update_device_credentials(self):
+ """
+ Update Device Credential to the Cisco Catalyst Center based on the provided playbook details.
+ Check the return value of the API with check_return_status().
+
+ Parameters:
+ self
+
+ Returns:
+ self
+ """
+
+ result_global_credential = self.result.get("response")[0].get("globalCredential")
+
+ # Get the result global credential and want_update from the current object
+ want_update = self.want.get("want_update")
+ # If no credentials to update, update the result and return
+ if not want_update:
+ result_global_credential.update({
+ "No Updation": {
+ "response": "No Response",
+ "msg": "No Updation is available"
+ }
+ })
+ self.msg = "No Updation is available"
+ self.status = "success"
+ return self
+ i = 0
+ flag = True
+ values = ["cliCredential", "snmpV2cRead", "snmpV2cWrite",
+ "httpsRead", "httpsWrite", "snmpV3"]
+ final_response = []
+ self.log("Desired State for global device credentials updation: {0}"
+ .format(want_update), "DEBUG")
+ while flag:
+ flag = False
+ credential_params = {}
+ for value in values:
+ if want_update.get(value) and i < len(want_update.get(value)):
+ flag = True
+ credential_params.update({value: want_update.get(value)[i]})
+ i = i + 1
+ if credential_params:
+ final_response.append(credential_params)
+ response = self.dnac._exec(
+ family="discovery",
+ function='update_global_credentials_v2',
+ params=credential_params,
+ )
+ self.log("Received API response for 'update_global_credentials_v2': {0}"
+ .format(response), "DEBUG")
+ validation_string = "global credential update performed"
+ self.check_task_response_status(response, validation_string).check_return_status()
+ self.log("Updating device credential API input parameters: {0}"
+ .format(final_response), "DEBUG")
+ self.log("Global device credential updated successfully", "INFO")
+ result_global_credential.update({
+ "Updation": {
+ "response": final_response,
+ "msg": "Global Device Credential Updated Successfully"
+ }
+ })
+ self.msg = "Global Device Credential Updated Successfully"
+ self.status = "success"
+ return self
+
+ def assign_credentials_to_site(self):
+ """
+ Assign Global Device Credential to the Cisco Catalyst
+ Center based on the provided playbook details.
+ Check the return value of the API with check_return_status().
+
+ Parameters:
+ self
+
+ Returns:
+ self
+ """
+
+ result_assign_credential = self.result.get("response")[0].get("assignCredential")
+ credential_params = self.want.get("assign_credentials")
+ final_response = []
+ self.log("Assigning device credential to site API input parameters: {0}"
+ .format(credential_params), "DEBUG")
+ if not credential_params:
+ result_assign_credential.update({
+ "No Assign Credentials": {
+ "response": "No Response",
+ "msg": "No Assignment is available"
+ }
+ })
+ self.msg = "No Assignment is available"
+ self.status = "success"
+ return self
+
+ site_ids = self.want.get("site_id")
+ for site_id in site_ids:
+ credential_params.update({"site_id": site_id})
+ final_response.append(copy.deepcopy(credential_params))
+ response = self.dnac._exec(
+ family="network_settings",
+ function='assign_device_credential_to_site_v2',
+ params=credential_params,
+ )
+ self.log("Received API response for 'assign_device_credential_to_site_v2': {0}"
+ .format(response), "DEBUG")
+ validation_string = "desired common settings operation successful"
+ self.check_task_response_status(response, validation_string).check_return_status()
+ self.log("Device credential assigned to site {0} is successfully."
+ .format(site_ids), "INFO")
+ self.log("Desired State for assign credentials to a site: {0}"
+ .format(final_response), "DEBUG")
+ result_assign_credential.update({
+ "Assign Credentials": {
+ "response": final_response,
+ "msg": "Device Credential Assigned to a site is Successfully"
+ }
+ })
+ self.msg = "Global Credential is assigned Successfully"
+ self.status = "success"
+ return self
+
+ def get_diff_merged(self, config):
+ """
+ Update or Create Global Device Credential and assign device
+ credential to a site in Cisco Catalyst Center based on the playbook provided.
+
+ Parameters:
+ config (list of dict) - Playbook details containing Global
+ Device Credential and assign credentials to a site information.
+
+ Returns:
+ self
+ """
+
+ if config.get("global_credential_details") is not None:
+ self.create_device_credentials().check_return_status()
+
+ if config.get("global_credential_details") is not None:
+ self.update_device_credentials().check_return_status()
+
+ if config.get("assign_credentials_to_site") is not None:
+ self.assign_credentials_to_site().check_return_status()
+
+ return self
+
+ def delete_device_credential(self, config):
+ """
+ Delete Global Device Credential in Cisco Catalyst Center based on the playbook details.
+ Check the return value of the API with check_return_status().
+
+ Parameters:
+ config (dict) - Playbook details containing Global Device Credential information.
+ self - The current object details.
+
+ Returns:
+ self
+ """
+
+ result_global_credential = self.result.get("response")[0].get("globalCredential")
+ have_values = self.have.get("globalCredential")
+ final_response = {}
+ self.log("Global device credentials to be deleted: {0}".format(have_values), "DEBUG")
+ credential_mapping = {
+ "cliCredential": "cli_credential",
+ "snmpV2cRead": "snmp_v2c_read",
+ "snmpV2cWrite": "snmp_v2c_write",
+ "snmpV3": "snmp_v3",
+ "httpsRead": "https_read",
+ "httpsWrite": "https_write"
+ }
+ for item in have_values:
+ config_itr = 0
+ final_response.update({item: []})
+ for value in have_values.get(item):
+ if value is None:
+ self.log("Credential Name: {0}".format(item), "DEBUG")
+ self.log("Credential Item: {0}".format(config.get("global_credential_details")
+ .get(credential_mapping.get(item))), "DEBUG")
+ final_response.get(item).append(
+ str(config.get("global_credential_details")
+ .get(credential_mapping.get(item))[config_itr]) + " is not found."
+ )
+ continue
+ _id = have_values.get(item)[config_itr].get("id")
+ response = self.dnac._exec(
+ family="discovery",
+ function="delete_global_credential_v2",
+ params={"id": _id},
+ )
+ self.log("Received API response for 'delete_global_credential_v2': {0}"
+ .format(response), "DEBUG")
+ validation_string = "global credential deleted successfully"
+ self.check_task_response_status(response, validation_string).check_return_status()
+ final_response.get(item).append(_id)
+ config_itr = config_itr + 1
+
+ self.log("Deleting device credential API input parameters: {0}"
+ .format(final_response), "DEBUG")
+ self.log("Successfully deleted global device credential.", "INFO")
+ result_global_credential.update({
+ "Deletion": {
+ "response": final_response,
+ "msg": "Global Device Credentials Deleted Successfully"
+ }
+ })
+ self.msg = "Global Device Credentials Updated Successfully"
+ self.status = "success"
+ return self
+
+ def get_diff_deleted(self, config):
+ """
+ Delete Global Device Credential in Cisco Catalyst Center based on the playbook details.
+
+ Parameters:
+ config (dict) - Playbook details containing Global Device Credential information.
+ self - The current object details.
+
+ Returns:
+ self
+ """
+
+ if config.get("global_credential_details") is not None:
+ self.delete_device_credential(config).check_return_status()
+
+ return self
+
+ def verify_diff_merged(self, config):
+ """
+ Validating the Cisco Catalyst Center configuration with the playbook details
+ when state is merged (Create/Update).
+
+ Parameters:
+ config (dict) - Playbook details containing Global Pool,
+ Reserved Pool, and Network Management configuration.
+
+ Returns:
+ self
+ """
+
+ self.log(str("Entered the verify function."), "DEBUG")
+ self.get_have(config)
+ self.get_want(config)
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.log("Desired State (want): {0}".format(self.want), "INFO")
+
+ if config.get("global_credential_details") is not None:
+ if self.want.get("want_create"):
+ self.msg = "Global Device Credentials config is not applied to the Cisco Catalyst Center"
+ self.status = "failed"
+ return self
+
+ if self.want.get("want_update"):
+ credential_types = ["cliCredential", "snmpV2cRead", "snmpV2cWrite",
+ "httpsRead", "httpsWrite", "snmpV3"]
+ value_mapping = {
+ "cliCredential": ["username", "description", "id"],
+ "snmpV2cRead": ["description", "id"],
+ "snmpV2cWrite": ["description", "id"],
+ "httpsRead": ["description", "username", "port", "id"],
+ "httpsWrite": ["description", "username", "port", "id"],
+ "snmpV3": ["username", "description", "snmpMode", "id"]
+ }
+ for credential_type in credential_types:
+ if self.want.get(credential_type):
+ want_credential = self.want.get(credential_type)
+ if self.have.get(credential_type):
+ have_credential = self.have.get(credential_type)
+ values = value_mapping.get(credential_type)
+ for value in values:
+ equality = have_credential.get(value) is want_credential.get(value)
+ if not have_credential or not equality:
+ self.msg = "{0} config is not applied ot the Cisco Catalyst Center".format(credential_type)
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated global device credential", "INFO")
+ self.result.get("response")[0].get("globalCredential").update({"Validation": "Success"})
+
+ if config.get("assign_credentials_to_site") is not None:
+ self.log("Successfully validated the assign device credential to site", "INFO")
+ self.result.get("response")[0].get("assignCredential").update({"Validation": "Success"})
+
+ self.msg = "Successfully validated the Global Device Credential and \
+ Assign Device Credential to Site."
+ self.status = "success"
+ return self
+
+ def verify_diff_deleted(self, config):
+ """
+ Validating the Cisco Catalyst Center configuration with the playbook details
+ when state is deleted (delete).
+
+ Parameters:
+ config (dict) - Playbook details containing Global Pool,
+ Reserved Pool, and Network Management configuration.
+
+ Returns:
+ self
+ """
+
+ self.get_have(config)
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.log("Desired State (want): {0}".format(self.want), "INFO")
+
+ if config.get("global_credential_details") is not None:
+ have_global_credential = self.have.get("globalCredential")
+ credential_types = ["cliCredential", "snmpV2cRead", "snmpV2cWrite",
+ "httpsRead", "httpsWrite", "snmpV3"]
+ for credential_type in credential_types:
+ for item in have_global_credential.get(credential_type):
+ if item is not None:
+ self.msg = "Delete Global Device Credentials config \
+ is not applied to the config"
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated absence of global device credential.", "INFO")
+ self.result.get("response")[0].get("globalCredential").update({"Validation": "Success"})
+
+ self.msg = "Successfully validated the absence of Global Device Credential."
+ self.status = "success"
+ return self
+
+ def reset_values(self):
+ """
+ Reset all neccessary attributes to default values
+
+ Parameters:
+ self
+
+ Returns:
+ self
+ """
+
+ self.have.clear()
+ self.want.clear()
+ return self
+
+
def main():
    """main entry point for module execution"""

    # Argument specification accepted by this module; values mirror the
    # common Catalyst Center connection parameters.
    argument_spec = dict(
        dnac_host=dict(type='str', required=True),
        dnac_port=dict(type='str', default='443'),
        dnac_username=dict(type='str', default='admin', aliases=['user']),
        dnac_password=dict(type='str', no_log=True),
        dnac_verify=dict(type='bool', default='True'),
        dnac_version=dict(type='str', default='2.2.3.3'),
        dnac_debug=dict(type='bool', default=False),
        dnac_log=dict(type='bool', default=False),
        dnac_log_level=dict(type='str', default='WARNING'),
        dnac_log_file_path=dict(type='str', default='dnac.log'),
        dnac_log_append=dict(type='bool', default=True),
        config_verify=dict(type='bool', default=False),
        dnac_api_task_timeout=dict(type='int', default=1200),
        dnac_task_poll_interval=dict(type='int', default=2),
        config=dict(type='list', required=True, elements='dict'),
        state=dict(default='merged', choices=['merged', 'deleted']),
        validate_response_schema=dict(type='bool', default=True),
    )

    # Build the Ansible module and the credential workflow object.
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
    ccc_credential = DeviceCredential(module)
    state = ccc_credential.params.get("state")
    config_verify = ccc_credential.params.get("config_verify")
    if state not in ccc_credential.supported_states:
        ccc_credential.status = "invalid"
        ccc_credential.msg = "State {0} is invalid".format(state)
        ccc_credential.check_return_status()

    ccc_credential.validate_input().check_return_status()

    # Apply (and optionally verify) each playbook config item in turn.
    for config in ccc_credential.config:
        ccc_credential.reset_values()
        ccc_credential.get_have(config).check_return_status()
        if state != "deleted":
            ccc_credential.get_want(config).check_return_status()
        ccc_credential.get_diff_state_apply[state](config).check_return_status()
        if config_verify:
            ccc_credential.verify_diff_state_apply[state](config).check_return_status()

    module.exit_json(**ccc_credential.result)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ansible_collections/cisco/dnac/plugins/modules/discovery_intent.py b/ansible_collections/cisco/dnac/plugins/modules/discovery_intent.py
new file mode 100644
index 000000000..96759bb9c
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/discovery_intent.py
@@ -0,0 +1,1713 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2024, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ("Abinash Mishra, Phan Nguyen, Madhan Sankaranarayanan")
+
+DOCUMENTATION = r"""
+---
+module: discovery_intent
+short_description: A resource module for handling device discovery tasks.
+description:
+- Manages device discovery using IP address, address range, CDP, and LLDP, including deletion of discovered devices.
+- API to discover a device or multiple devices
+- API to delete a discovery of a device or multiple devices
+version_added: '6.6.0'
+extends_documentation_fragment:
+ - cisco.dnac.intent_params
+author: Abinash Mishra (@abimishr)
+ Phan Nguyen (@phannguy)
+ Madhan Sankaranarayanan (@madhansansel)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Cisco Catalyst Center after module completion.
+ type: str
+ choices: [ merged, deleted ]
+ default: merged
+ config:
+ description:
+ - List of details of device being managed.
+ type: list
+ elements: dict
+ required: true
+ suboptions:
+ discovery_name:
+ description: Name of the discovery task
+ type: str
+ required: true
+ discovery_type:
+ description: Determines the method of device discovery. Here are the available options.
+ - SINGLE discovers a single device using a single IP address.
+ - RANGE discovers multiple devices within a single IP address range.
+ - MULTI RANGE discovers devices across multiple IP address ranges.
+ - CDP uses Cisco Discovery Protocol to discover devices in subsequent layers of the given IP address.
+ - LLDP uses Link Layer Discovery Protocol to discover devices in subsequent layers of the specified IP address.
+ - CIDR discovers devices based on subnet filtering using Classless Inter-Domain Routing.
+ type: str
+ required: true
+ choices: [ 'SINGLE', 'RANGE', 'MULTI RANGE', 'CDP', 'LLDP', 'CIDR']
+ ip_address_list:
+ description: List of IP addresses to be discovered. For CDP/LLDP/SINGLE based discovery, we should
+ pass a list with single element like - 10.197.156.22. For CIDR based discovery, we should pass a list with
+ single element like - 10.197.156.22/22. For RANGE based discovery, we should pass a list with single element
+ and range like - 10.197.156.1-10.197.156.100. For MULTI RANGE based discovery, we should pass a list with multiple
+        elements like - 10.197.156.1-10.197.156.100 and in next line - 10.197.157.1-10.197.157.100.
+ type: list
+ elements: str
+ required: true
+ ip_filter_list:
+        description: List of IP addresses that need to be filtered out from the IP addresses passed.
+ type: list
+ elements: str
+ cdp_level:
+ description: Total number of levels that are there in cdp's method of discovery
+ type: int
+ default: 16
+ lldp_level:
+ description: Total number of levels that are there in lldp's method of discovery
+ type: int
+ default: 16
+ preferred_mgmt_ip_method:
+ description: Preferred method for the management of the IP (None/UseLoopBack)
+ type: str
+ default: None
+ use_global_credentials:
+ description:
+ - Determines if device discovery should utilize pre-configured global credentials.
+ - Setting to True employs the predefined global credentials for discovery tasks. This is the default setting.
+ - Setting to False requires manually provided, device-specific credentials for discovery, as global credentials will be bypassed.
+ type: bool
+ default: True
+ discovery_specific_credentials:
+ description: Credentials specifically created by the user for performing device discovery.
+ type: dict
+ suboptions:
+ cli_credentials_list:
+ description: List of CLI credentials to be used during device discovery.
+ type: list
+ elements: dict
+ suboptions:
+ username:
+ description: Username for CLI authentication, mandatory when using CLI credentials.
+ type: str
+ password:
+ description: Password for CLI authentication, mandatory when using CLI credential.
+ type: str
+ enable_password:
+ description: Enable password for CLI authentication, mandatory when using CLI credential.
+ type: str
+ http_read_credential:
+ description: HTTP read credential is used for authentication purposes and specifically utilized to
+ grant read-only access to certain resources from the device.
+ type: dict
+ suboptions:
+ username:
+ description: Username for HTTP(S) Read authentication, mandatory when using HTTP credentials.
+ type: str
+ password:
+ description: Password for HTTP(S) Read authentication, mandatory when using HTTP credentials.
+ type: str
+ port:
+ description: Port for HTTP(S) Read authentication, mandatory for using HTTP credentials.
+ type: int
+ secure:
+ description: Flag for HTTP(S) Read authentication, not mandatory when using HTTP credentials.
+ type: bool
+ http_write_credential:
+ description: HTTP write credential is used for authentication purposes and grants Cisco Catalyst Center the
+ ability to alter configurations, update software, or perform other modifications on a network device.
+ type: dict
+ suboptions:
+ username:
+ description: Username for HTTP(S) Write authentication, mandatory when using HTTP credentials.
+ type: str
+ password:
+ description: Password for HTTP(S) Write authentication, mandatory when using HTTP credentials.
+ type: str
+ port:
+ description: Port for HTTP(S) Write authentication, mandatory when using HTTP credentials.
+ type: int
+ secure:
+ description: Flag for HTTP(S) Write authentication, not mandatory when using HTTP credentials.
+ type: bool
+ snmp_v2_read_credential:
+ description:
+ - The SNMP v2 credentials to be created and used for contacting a device via SNMP protocol in read mode.
+ - SNMP v2 also delivers data encryptions, but it uses data types.
+ type: dict
+ suboptions:
+ desc:
+ description: Name/Description of the SNMP read credential to be used for creation of snmp_v2_read_credential.
+ type: str
+ community:
+ description: SNMP V2 Read community string enables Cisco Catalyst Center to extract read-only data from device.
+ type: str
+ snmp_v2_write_credential:
+ description:
+ - The SNMP v2 credentials to be created and used for contacting a device via SNMP protocol in read and write mode.
+ - SNMP v2 also delivers data encryptions, but it uses data types.
+ type: dict
+ suboptions:
+ desc:
+ description: Name/Description of the SNMP write credential to be used for creation of snmp_v2_write_credential.
+ type: str
+ community:
+ description: SNMP V2 Write community string is used to extract data and alter device configurations.
+ type: str
+ snmp_v3_credential:
+ description:
+ - The SNMP v3 credentials to be created and used for contacting a device via SNMP protocol in read and write mode.
+ - SNMPv3 is the most secure version of SNMP, allowing users to fully encrypt transmissions, keeping us safe from external attackers.
+ type: dict
+ suboptions:
+ username:
+ description: Username of the SNMP v3 protocol to be used.
+ type: str
+ snmp_mode:
+ description:
+ - Mode of SNMP which determines the encryption level of our community string.
+ - AUTHPRIV mode uses both Authentication and Encryption.
+ - AUTHNOPRIV mode uses Authentication but no Encryption.
+ - NOAUTHNOPRIV mode does not use either Authentication or Encryption.
+ type: str
+ choices: [ 'AUTHPRIV', 'AUTHNOPRIV', 'NOAUTHNOPRIV' ]
+ auth_password:
+ description:
+ - Authentication Password of the SNMP v3 protocol to be used.
+ - Must be of length greater than 7 characters.
+ - Not required for NOAUTHNOPRIV snmp_mode.
+ type: str
+ auth_type:
+ description:
+ - Authentication type of the SNMP v3 protocol to be used.
+ - SHA uses Secure Hash Algorithm (SHA) as your authentication protocol.
+ - MD5 uses Message Digest 5 (MD5) as your authentication protocol and is not recommended.
+ - Not required for NOAUTHNOPRIV snmp_mode.
+ type: str
+ choices: [ 'SHA', 'MD5' ]
+ privacy_type:
+ description:
+ - Privacy type/protocol of the SNMP v3 protocol to be used in AUTHPRIV SNMP mode
+ - Not required for AUTHNOPRIV and NOAUTHNOPRIV snmp_mode.
+ type: str
+ choices: [ 'AES128', 'AES192', 'AES256' ]
+ privacy_password:
+ description:
+ - Privacy password of the SNMP v3 protocol to be used in AUTHPRIV SNMP mode
+ - Not required for AUTHNOPRIV and NOAUTHNOPRIV snmp_mode.
+ type: str
+ net_conf_port:
+ description:
+ - To be used when network contains IOS XE-based wireless controllers.
+ - This is used for discovery and the enabling of wireless services on the controllers.
+ - Requires valid SSH credentials to work.
+ - Avoid standard ports like 22, 80, and 8080.
+ type: str
+ global_credentials:
+ description:
+ - Set of various credential types, including CLI, SNMP, HTTP, and NETCONF, that a user has pre-configured in
+ the Device Credentials section of the Cisco Catalyst Center.
+ - If user doesn't pass any global credentials in the playbook, then by default, we will use all the global
+ credentials present in the Cisco Catalyst Center of each type for performing discovery. (Max 5 allowed)
+ type: dict
+ version_added: 6.12.0
+ suboptions:
+ cli_credentials_list:
+ description:
+ - Accepts a list of global CLI credentials for use in device discovery.
+ - It's recommended to create device credentials with both a unique username and a clear description.
+ type: list
+ elements: dict
+ suboptions:
+ username:
+ description: Username required for CLI authentication and is mandatory when using global CLI credentials.
+ type: str
+ description:
+ description: Name of the CLI credential, mandatory when using global CLI credentials.
+ type: str
+ http_read_credential_list:
+ description:
+ - List of global HTTP Read credentials that will be used in the process of discovering devices.
+ - It's recommended to create device credentials with both a unique username and a clear description for easy identification.
+ type: list
+ elements: dict
+ suboptions:
+ username:
+ description: Username for HTTP Read authentication, mandatory when using global HTTP credentials.
+ type: str
+ description:
+ description: Name of the HTTP Read credential, mandatory when using global HTTP credentials.
+ type: str
+ http_write_credential_list:
+ description:
+ - List of global HTTP Write credentials that will be used in the process of discovering devices.
+ - It's recommended to create device credentials with both a unique username and a clear description for easy identification.
+ type: list
+ elements: dict
+ suboptions:
+ username:
+ description: Username for HTTP Write authentication, mandatory when using global HTTP credentials.
+ type: str
+ description:
+ description: Name of the HTTP Write credential, mandatory when using global HTTP credentials.
+ type: str
+ snmp_v2_read_credential_list:
+ description:
+ - List of Global SNMP V2 Read credentials to be used during device discovery.
+ - It's recommended to create device credentials with a clear description for easy identification.
+ type: list
+ elements: dict
+ suboptions:
+ description:
+ description: Name of the SNMP Read credential, mandatory when using global SNMP credentials.
+ type: str
+ snmp_v2_write_credential_list:
+ description:
+ - List of Global SNMP V2 Write credentials to be used during device discovery.
+ - It's recommended to create device credentials with a clear description for easy identification.
+ type: list
+ elements: dict
+ suboptions:
+ description:
+ description: Name of the SNMP Write credential, mandatory when using global SNMP credentials.
+ type: str
+ snmp_v3_credential_list:
+ description:
+ - List of Global SNMP V3 credentials to be used during device discovery, giving read and write mode.
+ - It's recommended to create device credentials with both a unique username and a clear description for easy identification.
+ type: list
+ elements: dict
+ suboptions:
+ username:
+ description: Username for SNMP V3 authentication, mandatory when using global SNMP credentials.
+ type: str
+ description:
+ description: Name of the SNMP V3 credential, mandatory when using global SNMP credentials.
+ type: str
+ net_conf_port_list:
+ description:
+ - List of Global Net conf ports to be used during device discovery.
+ - It's recommended to create device credentials with unique description.
+ type: list
+ elements: dict
+ suboptions:
+ description:
+ description: Name of the Net Conf Port credential, mandatory when using global Net conf port.
+ type: str
+ start_index:
+ description: Start index for the header in fetching SNMP v2 credentials
+ type: int
+ default: 1
+ records_to_return:
+ description: Number of records to return for the header in fetching global v2 credentials
+ type: int
+ default: 100
+ protocol_order:
+ description: Determines the order in which device connections will be attempted. Here are the options
+ - "telnet" Only telnet connections will be tried.
+ - "ssh, telnet" SSH (Secure Shell) will be attempted first, followed by telnet if SSH fails.
+ type: str
+ default: ssh
+ retry:
+ description: Number of times to try establishing connection to device
+ type: int
+ timeout:
+ description: Time to wait for device response in seconds
+ type: int
+ delete_all:
+ description: Parameter to delete all the discoveries at one go
+ type: bool
+ default: False
+requirements:
+- dnacentersdk == 2.6.10
+- python >= 3.5
+notes:
+ - SDK Method used are
+ discovery.Discovery.get_all_global_credentials_v2,
+ discovery.Discovery.start_discovery,
+ task.Task.get_task_by_id,
+ discovery.Discovery.get_discoveries_by_range,
+ discovery.Discovery.get_discovered_network_devices_by_discovery_id',
+ discovery.Discovery.delete_discovery_by_id
+ discovery.Discovery.delete_all_discovery
+ discovery.Discovery.get_count_of_all_discovery_jobs
+
+ - Paths used are
+ get /dna/intent/api/v2/global-credential
+ post /dna/intent/api/v1/discovery
+ get /dna/intent/api/v1/task/{taskId}
+ get /dna/intent/api/v1/discovery/{startIndex}/{recordsToReturn}
+ get /dna/intent/api/v1/discovery/{id}/network-device
+ delete /dna/intent/api/v1/discovery/{id}
+ delete /dna/intent/api/v1/delete
+ get /dna/intent/api/v1/discovery/count
+
+ - Removed 'global_cli_len' option in v6.12.0.
+
+"""
+
+EXAMPLES = r"""
+- name: Execute discovery of devices with both global credentials and discovery specific credentials
+ cisco.dnac.discovery_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: Discovery with both global and job specific credentials
+ discovery_type: RANGE
+ ip_address_list:
+ - 201.1.1.1-201.1.1.100
+ ip_filter_list:
+ - 201.1.1.2
+ - 201.1.1.10
+ discovery_specific_credentials:
+ cli_credentials_list:
+ - username: cisco
+ password: Cisco123
+ enable_password: Cisco123
+ http_read_credential:
+ username: cisco
+ password: Cisco123
+ port: 443
+ secure: true
+ http_write_credential:
+ username: cisco
+ password: Cisco123
+ port: 443
+ secure: True
+ snmp_v2_read_credential:
+ desc: snmp_v2-new
+ community: Cisco123
+ snmp_v2_write_credential:
+ desc: snmp_v2-new
+ community: Cisco123
+ snmp_v3_credential:
+ username: v3Public2
+ snmp_mode: AUTHPRIV
+ auth_type: SHA
+ auth_password: Lablab123
+ privacy_type: AES256
+ privacy_password: Lablab123
+ net_conf_port: 750
+ global_credentials:
+ cli_credentials_list:
+ - description: ISE
+ username: cisco
+ - description: CLI1234
+ username: cli
+ http_read_credential_list:
+ - description: HTTP Read
+ username: HTTP_Read
+ http_write_credential_list:
+ - description: HTTP Write
+ username: HTTP_Write
+ snmp_v3_credential_list:
+ - description: snmpV3
+ username: snmpV3
+ snmp_v2_read_credential_list:
+ - description: snmpV2_read
+ snmp_v2_write_credential_list:
+ - description: snmpV2_write
+ net_conf_port_list:
+ - description: Old_one
+ start_index: 1
+ records_to_return: 100
+ protocol_order: ssh
+ retry: 5
+ timeout: 3
+
+- name: Execute discovery of devices with discovery specific credentials only
+ cisco.dnac.discovery_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: Single with discovery specific credentials only
+ discovery_type: SINGLE
+ ip_address_list:
+ - 204.1.1.10
+ discovery_specific_credentials:
+ cli_credentials_list:
+ - username: cisco
+ password: Cisco123
+ enable_password: Cisco123
+ http_read_credential:
+ username: cisco
+ password: Cisco123
+ port: 443
+ secure: true
+ http_write_credential:
+ username: cisco
+ password: Cisco123
+ port: 443
+ secure: True
+ snmp_v2_read_credential:
+ desc: snmp_v2-new
+ community: Cisco123
+ snmp_v2_write_credential:
+ desc: snmp_v2-new
+ community: Cisco123
+ snmp_v3_credential:
+ username: v3Public2
+ snmp_mode: AUTHPRIV
+ auth_type: SHA
+ auth_password: Lablab123
+ privacy_type: AES256
+ privacy_password: Lablab123
+ net_conf_port: 750
+ use_global_credentials: False
+ start_index: 1
+ records_to_return: 100
+ protocol_order: ssh
+ retry: 5
+ timeout: 3
+
+- name: Execute discovery of devices with global credentials only
+ cisco.dnac.discovery_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: CDP with global credentials only
+ discovery_type: CDP
+ ip_address_list:
+ - 204.1.1.1
+ cdp_level: 16
+ global_credentials:
+ cli_credentials_list:
+ - description: ISE
+ username: cisco
+ - description: CLI1234
+ username: cli
+ http_read_credential_list:
+ - description: HTTP Read
+ username: HTTP_Read
+ http_write_credential_list:
+ - description: HTTP Write
+ username: HTTP_Write
+ snmp_v3_credential_list:
+ - description: snmpV3
+ username: snmpV3
+ snmp_v2_read_credential_list:
+ - description: snmpV2_read
+ snmp_v2_write_credential_list:
+ - description: snmpV2_write
+ net_conf_port_list:
+ - description: Old_one
+ start_index: 1
+ records_to_return: 100
+ protocol_order: ssh
+ retry: 5
+ timeout: 3
+
+- name: Execute discovery of devices with all the global credentials (max 5 allowed)
+ cisco.dnac.discovery_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: CIDR with all global credentials
+ discovery_type: CIDR
+ ip_address_list:
+ - 204.1.2.0/24
+ ip_filter_list:
+ - 204.1.2.10
+ preferred_mgmt_ip_method: None
+ start_index: 1
+ records_to_return: 100
+ protocol_order: telnet
+ retry: 10
+ timeout: 3
+ use_global_credentials: True
+
+- name: Delete discovery by name
+ cisco.dnac.discovery_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: deleted
+ config_verify: True
+ config:
+ - discovery_name: Single discovery
+"""
+
+RETURN = r"""
+#Case_1: When the device(s) are discovered successfully.
+response_1:
+ description: A dictionary with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response":
+ {
+ "response": String,
+ "version": String
+ },
+ "msg": String
+ }
+
+#Case_2: Given device details or SNMP mode are not provided
+response_2:
+ description: A list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: list
+ sample: >
+ {
+ "response": [],
+ "msg": String
+ }
+
+#Case_3: Error while deleting a discovery
+response_3:
+ description: A string with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": String,
+ "msg": String
+ }
+"""
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts
+)
+import time
+import re
+
+
+class Discovery(DnacBase):
+    def __init__(self, module):
+        """
+        Initialize an instance of the class. It also initializes an empty
+        list for the 'creds_ids_list' attribute.
+
+        Parameters:
+          - module: The AnsibleModule instance associated with this class.
+
+        Returns:
+          The method does not return a value. Instead, it initializes the
+          following instance attributes:
+          - self.creds_ids_list: An empty list that will be used to store
+            credential IDs fetched later during discovery.
+        """
+
+        super().__init__(module)
+        self.creds_ids_list = []
+
+ def validate_input(self, state=None):
+ """
+ Validate the fields provided in the playbook. Checks the
+ configuration provided in the playbook against a predefined
+ specification to ensure it adheres to the expected structure
+ and data types.
+
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of the
+ 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call
+ 'validate_input' on it.If the validation succeeds, 'self.status'
+ will be 'success'and 'self.validated_config' will contain the
+ validated configuration. If it fails, 'self.status' will be
+ 'failed', and 'self.msg' will describe the validation issues.
+ """
+
+ if not self.config:
+ self.msg = "config not available in playbook for validation"
+ self.status = "success"
+ return self
+
+ discovery_spec = {
+ 'cdp_level': {'type': 'int', 'required': False,
+ 'default': 16},
+ 'start_index': {'type': 'int', 'required': False,
+ 'default': 1},
+ 'records_to_return': {'type': 'int', 'required': False,
+ 'default': 100},
+ 'discovery_specific_credentials': {'type': 'dict', 'required': False},
+ 'ip_filter_list': {'type': 'list', 'required': False,
+ 'elements': 'str'},
+ 'lldp_level': {'type': 'int', 'required': False,
+ 'default': 16},
+ 'discovery_name': {'type': 'str', 'required': True},
+ 'netconf_port': {'type': 'str', 'required': False},
+ 'preferred_mgmt_ip_method': {'type': 'str', 'required': False,
+ 'default': 'None'},
+ 'retry': {'type': 'int', 'required': False},
+ 'timeout': {'type': 'str', 'required': False},
+ 'global_credentials': {'type': 'dict', 'required': False},
+ 'protocol_order': {'type': 'str', 'required': False, 'default': 'ssh'},
+ 'use_global_credentials': {'type': 'bool', 'required': False, 'default': True}
+ }
+
+ if state == "merged":
+ discovery_spec["ip_address_list"] = {'type': 'list', 'required': True,
+ 'elements': 'str'}
+ discovery_spec["discovery_type"] = {'type': 'str', 'required': True}
+
+ elif state == "deleted":
+ if self.config[0].get("delete_all") is True:
+ self.validated_config = [{"delete_all": True}]
+ self.msg = "Sucessfully collected input for deletion of all the discoveries"
+ self.log(self.msg, "WARNING")
+ return self
+
+ # Validate discovery params
+ valid_discovery, invalid_params = validate_list_of_dicts(
+ self.config, discovery_spec
+ )
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format(
+ "\n".join(invalid_params))
+ self.log(str(self.msg), "ERROR")
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_discovery
+ self.msg = "Successfully validated playbook configuration parameters using 'validate_input': {0}".format(str(valid_discovery))
+ self.log(str(self.msg), "INFO")
+ self.status = "success"
+ return self
+
+    def get_creds_ids_list(self):
+        """
+        Retrieve the list of credential IDs associated with this class instance.
+
+        Returns:
+            The method returns the list of credential IDs:
+            - self.creds_ids_list: The list of credential IDs associated with
+              the class instance (initialized as an empty list in __init__).
+        """
+
+        self.log("Credential Ids list passed is {0}".format(str(self.creds_ids_list)), "INFO")
+        return self.creds_ids_list
+
+ def handle_global_credentials(self, response=None):
+ """
+ Method to convert values for create_params API when global paramters
+ are passed as input.
+
+ Parameters:
+ - response: The response collected from the get_all_global_credentials_v2 API
+
+ Returns:
+ - global_credentials_all : The dictionary containing list of IDs of various types of
+ Global credentials.
+ """
+
+ global_credentials = self.validated_config[0].get("global_credentials")
+ global_credentials_all = {}
+
+ cli_credentials_list = global_credentials.get('cli_credentials_list')
+ if cli_credentials_list:
+ if not isinstance(cli_credentials_list, list):
+ msg = "Global CLI credentials must be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if response.get("cliCredential") is None:
+ msg = "Global CLI credentials are not present in the Cisco Catalyst Center"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(cli_credentials_list) > 0:
+ global_credentials_all["cliCredential"] = []
+ cred_len = len(cli_credentials_list)
+ if cred_len > 5:
+ cred_len = 5
+ for cli_cred in cli_credentials_list:
+ if cli_cred.get('description') and cli_cred.get('username'):
+ for cli in response.get("cliCredential"):
+ if cli.get("description") == cli_cred.get('description') and cli.get("username") == cli_cred.get('username'):
+ global_credentials_all["cliCredential"].append(cli.get("id"))
+ global_credentials_all["cliCredential"] = global_credentials_all["cliCredential"][:cred_len]
+ else:
+ msg = "Kindly ensure you include both the description and the username for the Global CLI credential to discover the devices"
+ self.discovery_specific_cred_failure(msg=msg)
+
+ http_read_credential_list = global_credentials.get('http_read_credential_list')
+ if http_read_credential_list:
+ if not isinstance(http_read_credential_list, list):
+ msg = "Global HTTP read credentials must be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if response.get("httpsRead") is None:
+ msg = "Global HTTP read credentials are not present in the Cisco Catalyst Center"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(http_read_credential_list) > 0:
+ global_credentials_all["httpsRead"] = []
+ cred_len = len(http_read_credential_list)
+ if cred_len > 5:
+ cred_len = 5
+ for http_cred in http_read_credential_list:
+ if http_cred.get('description') and http_cred.get('username'):
+ for http in response.get("httpsRead"):
+ if http.get("description") == http.get('description') and http.get("username") == http.get('username'):
+ global_credentials_all["httpsRead"].append(http.get("id"))
+ global_credentials_all["httpsRead"] = global_credentials_all["httpsRead"][:cred_len]
+ else:
+ msg = "Kindly ensure you include both the description and the username for the Global HTTP Read credential to discover the devices"
+ self.discovery_specific_cred_failure(msg=msg)
+
+ http_write_credential_list = global_credentials.get('http_write_credential_list')
+ if http_write_credential_list:
+ if not isinstance(http_write_credential_list, list):
+ msg = "Global HTTP write credentials must be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if response.get("httpsWrite") is None:
+ msg = "Global HTTP write credentials are not present in the Cisco Catalyst Center"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(http_write_credential_list) > 0:
+ global_credentials_all["httpsWrite"] = []
+ cred_len = len(http_write_credential_list)
+ if cred_len > 5:
+ cred_len = 5
+ for http_cred in http_write_credential_list:
+ if http_cred.get('description') and http_cred.get('username'):
+ for http in response.get("httpsWrite"):
+ if http.get("description") == http.get('description') and http.get("username") == http.get('username'):
+ global_credentials_all["httpsWrite"].append(http.get("id"))
+ global_credentials_all["httpsWrite"] = global_credentials_all["httpsWrite"][:cred_len]
+ else:
+ msg = "Kindly ensure you include both the description and the username for the Global HTTP Write credential to discover the devices"
+ self.discovery_specific_cred_failure(msg=msg)
+
+ snmp_v2_read_credential_list = global_credentials.get('snmp_v2_read_credential_list')
+ if snmp_v2_read_credential_list:
+ if not isinstance(snmp_v2_read_credential_list, list):
+ msg = "Global SNMPv2 read credentials must be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if response.get("snmpV2cRead") is None:
+ msg = "Global SNMPv2 read credentials are not present in the Cisco Catalyst Center"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(snmp_v2_read_credential_list) > 0:
+ global_credentials_all["snmpV2cRead"] = []
+ cred_len = len(snmp_v2_read_credential_list)
+ if cred_len > 5:
+ cred_len = 5
+ for snmp_cred in snmp_v2_read_credential_list:
+ if snmp_cred.get('description'):
+ for snmp in response.get("snmpV2cRead"):
+ if snmp.get("description") == snmp_cred.get('description'):
+ global_credentials_all["snmpV2cRead"].append(snmp.get("id"))
+ global_credentials_all["snmpV2cRead"] = global_credentials_all["snmpV2cRead"][:cred_len]
+ else:
+ msg = "Kindly ensure you include the description for the Global SNMPv2 Read \
+ credential to discover the devices"
+ self.discovery_specific_cred_failure(msg=msg)
+
+ snmp_v2_write_credential_list = global_credentials.get('snmp_v2_write_credential_list')
+ if snmp_v2_write_credential_list:
+ if not isinstance(snmp_v2_write_credential_list, list):
+ msg = "Global SNMPv2 write credentials must be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if response.get("snmpV2cWrite") is None:
+ msg = "Global SNMPv2 write credentials are not present in the Cisco Catalyst Center"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(snmp_v2_write_credential_list) > 0:
+ global_credentials_all["snmpV2cWrite"] = []
+ cred_len = len(snmp_v2_write_credential_list)
+ if cred_len > 5:
+ cred_len = 5
+ for snmp_cred in snmp_v2_write_credential_list:
+ if snmp_cred.get('description'):
+ for snmp in response.get("snmpV2cWrite"):
+ if snmp.get("description") == snmp_cred.get('description'):
+ global_credentials_all["snmpV2cWrite"].append(snmp.get("id"))
+ global_credentials_all["snmpV2cWrite"] = global_credentials_all["snmpV2cWrite"][:cred_len]
+ else:
+ msg = "Kindly ensure you include the description for the Global SNMPV2 write credential to discover the devices"
+ self.discovery_specific_cred_failure(msg=msg)
+
+ snmp_v3_credential_list = global_credentials.get('snmp_v3_credential_list')
+ if snmp_v3_credential_list:
+ if not isinstance(snmp_v3_credential_list, list):
+ msg = "Global SNMPv3 write credentials must be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if response.get("snmpV3") is None:
+ msg = "Global SNMPv3 credentials are not present in the Cisco Catalyst Center"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(snmp_v3_credential_list) > 0:
+ global_credentials_all["snmpV3"] = []
+ cred_len = len(snmp_v3_credential_list)
+ if cred_len > 5:
+ cred_len = 5
+ for snmp_cred in snmp_v3_credential_list:
+ if snmp_cred.get('description') and snmp_cred.get('username'):
+ for snmp in response.get("snmpV3"):
+ if snmp.get("description") == snmp_cred.get('description') and snmp.get("username") == snmp_cred.get('username'):
+ global_credentials_all["snmpV3"].append(snmp.get("id"))
+ global_credentials_all["snmpV3"] = global_credentials_all["snmpV3"][:cred_len]
+ else:
+ msg = "Kindly ensure you include both the description and the username for the Global SNMPv3 \
+ to discover the devices"
+ self.discovery_specific_cred_failure(msg=msg)
+
+ net_conf_port_list = global_credentials.get('net_conf_port_list')
+ if net_conf_port_list:
+ if not isinstance(net_conf_port_list, list):
+ msg = "Global net Conf Ports be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if response.get("netconfCredential") is None:
+ msg = "Global netconf ports are not present in the Cisco Catalyst Center"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(net_conf_port_list) > 0:
+ global_credentials_all["netconfCredential"] = []
+ cred_len = len(net_conf_port_list)
+ if cred_len > 5:
+ cred_len = 5
+ for port in net_conf_port_list:
+ if port.get("description"):
+ for netconf in response.get("netconfCredential"):
+ if port.get('description') == netconf.get('description'):
+ global_credentials_all["netconfCredential"].append(netconf.get("id"))
+ global_credentials_all["netconfCredential"] = global_credentials_all["netconfCredential"][:cred_len]
+ else:
+ msg = "Please provide valid description of the Global Netconf port to be used"
+ self.discovery_specific_cred_failure(msg=msg)
+
+ self.log("Fetched Global credentials IDs are {0}".format(global_credentials_all), "INFO")
+ return global_credentials_all
+
    def get_ccc_global_credentials_v2_info(self):
        """
        Retrieve the IDs of the global credentials configured in Cisco
        Catalyst Center via the 'get_all_global_credentials_v2' API.

        If 'global_credentials' is present in the validated configuration,
        the credentials selected there are resolved first (via
        'handle_global_credentials'); for every credential type the user
        did not constrain, up to 5 IDs are taken straight from the API
        response.

        Returns:
            - global_credentials_all (dict): Mapping of credential type
              (e.g. 'cliCredential', 'snmpV2cRead') to a list of at most 5
              credential IDs. May be empty, in which case a warning is
              logged.
        """

        response = self.dnac_apply['exec'](
            family="discovery",
            function='get_all_global_credentials_v2',
            params=self.validated_config[0].get('headers'),
        )
        response = response.get('response')
        self.log("The Global credentials response from 'get all global credentials v2' API is {0}".format(str(response)), "DEBUG")
        global_credentials_all = {}
        global_credentials = self.validated_config[0].get("global_credentials")
        if global_credentials:
            global_credentials_all = self.handle_global_credentials(response=response)

        # For the credential types not already resolved above, fall back to
        # the IDs returned by the API, capped at 5 per type (presumably an
        # API limit -- TODO confirm against the start-discovery contract).
        global_cred_set = set(global_credentials_all.keys())
        response_cred_set = set(response.keys())
        diff_keys = response_cred_set.difference(global_cred_set)

        for key in diff_keys:
            global_credentials_all[key] = []
            if response[key] is None:
                response[key] = []
            total_len = len(response[key])
            if total_len > 5:
                total_len = 5
            for element in response.get(key):
                global_credentials_all[key].append(element.get('id'))
            global_credentials_all[key] = global_credentials_all[key][:total_len]

        if global_credentials_all == {}:
            msg = 'Not found any global credentials to perform discovery'
            self.log(msg, "WARNING")

        return global_credentials_all
+
+ def get_devices_list_info(self):
+ """
+ Retrieve the list of devices from the validated configuration.
+ It then updates the result attribute with this list.
+
+ Returns:
+ - ip_address_list: The list of devices extracted from the
+ 'validated_config' attribute.
+ """
+ ip_address_list = self.validated_config[0].get('ip_address_list')
+ self.result.update(dict(devices_info=ip_address_list))
+ self.log("Details of the device list passed: {0}".format(str(ip_address_list)), "INFO")
+ return ip_address_list
+
    def preprocess_device_discovery(self, ip_address_list=None):
        """
        Normalise the devices' IP address list into the single string value
        expected by the start-discovery API, based on the configured
        'discovery_type'.

        Parameters:
            - ip_address_list (list): The list of devices' IP addresses intended
              for preprocessing. If not provided, an empty list will be used.

        Returns:
            - ip_address_list (str): The IP address value for the API. The shape
              depends on the discovery type: SINGLE/CDP/LLDP -> one address;
              CIDR -> 'addr/prefix' (prefix defaults to 30); RANGE ->
              'start-end'; otherwise (MULTI RANGE) -> comma-separated
              'start-end' ranges.
        """

        if ip_address_list is None:
            ip_address_list = []
        discovery_type = self.validated_config[0].get('discovery_type')
        self.log("Discovery type passed for the discovery is {0}".format(discovery_type), "INFO")
        if discovery_type in ["SINGLE", "CDP", "LLDP"]:
            # These discovery types accept exactly one seed address.
            if len(ip_address_list) == 1:
                ip_address_list = ip_address_list[0]
            else:
                self.preprocess_device_discovery_handle_error()
        elif discovery_type == "CIDR":
            if len(ip_address_list) == 1:
                cidr_notation = ip_address_list[0]
                if len(cidr_notation.split("/")) == 2:
                    ip_address_list = cidr_notation
                else:
                    # No prefix length supplied -- default to /30.
                    ip_address_list = "{0}/30".format(cidr_notation)
                    self.log("CIDR notation is being used for discovery and it requires a prefix length to be specified, such as 1.1.1.1/24.\
                As no prefix length was provided, it will default to 30.", "WARNING")
            else:
                self.preprocess_device_discovery_handle_error()
        elif discovery_type == "RANGE":
            if len(ip_address_list) == 1:
                if len(str(ip_address_list[0]).split("-")) == 2:
                    ip_address_list = ip_address_list[0]
                else:
                    # Degenerate range: one address becomes 'addr-addr'.
                    ip_address_list = "{0}-{1}".format(ip_address_list[0], ip_address_list[0])
            else:
                self.preprocess_device_discovery_handle_error()
        else:
            # MULTI RANGE: normalise every entry to 'start-end' and join
            # the entries with commas.
            new_ip_collected = []
            for ip in ip_address_list:
                if len(str(ip).split("-")) != 2:
                    ip_collected = "{0}-{0}".format(ip)
                    new_ip_collected.append(ip_collected)
                else:
                    new_ip_collected.append(ip)
            ip_address_list = ','.join(new_ip_collected)
        self.log("Collected IP address/addresses are {0}".format(str(ip_address_list)), "INFO")
        return str(ip_address_list)
+
+ def preprocess_device_discovery_handle_error(self):
+ """
+ Method for failing discovery based on the length of list of IP Addresses passed
+ for performing discovery.
+ """
+
+ self.log("IP Address list's length is longer than 1", "ERROR")
+ self.module.fail_json(msg="IP Address list's length is longer than 1", response=[])
+
    def discovery_specific_cred_failure(self, msg=None):
        """
        Fail the module run because of a discrepancy in the credentials
        passed by the user.

        Parameters:
            - msg (str): Human-readable description of the problem; logged
              at CRITICAL level and returned to Ansible via fail_json.
        """

        self.log(msg, "CRITICAL")
        self.module.fail_json(msg=msg)
+
+ def handle_discovery_specific_credentials(self, new_object_params=None):
+ """
+ Method to convert values for create_params API when discovery specific paramters
+ are passed as input.
+
+ Parameters:
+ - new_object_params: The dictionary storing various parameters for calling the
+ start discovery API
+
+ Returns:
+ - new_object_params: The dictionary storing various parameters for calling the
+ start discovery API in an updated fashion
+ """
+
+ discovery_specific_credentials = self.validated_config[0].get('discovery_specific_credentials')
+ cli_credentials_list = discovery_specific_credentials.get('cli_credentials_list')
+ http_read_credential = discovery_specific_credentials.get('http_read_credential')
+ http_write_credential = discovery_specific_credentials.get('http_write_credential')
+ snmp_v2_read_credential = discovery_specific_credentials.get('snmp_v2_read_credential')
+ snmp_v2_write_credential = discovery_specific_credentials.get('snmp_v2_write_credential')
+ snmp_v3_credential = discovery_specific_credentials.get('snmp_v3_credential')
+ net_conf_port = discovery_specific_credentials.get('net_conf_port')
+
+ if cli_credentials_list:
+ if not isinstance(cli_credentials_list, list):
+ msg = "Device Specific ClI credentials must be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(cli_credentials_list) > 0:
+ username_list = []
+ password_list = []
+ enable_password_list = []
+ for cli_cred in cli_credentials_list:
+ if cli_cred.get('username') and cli_cred.get('password') and cli_cred.get('enable_password'):
+ username_list.append(cli_cred.get('username'))
+ password_list.append(cli_cred.get('password'))
+ enable_password_list.append(cli_cred.get('enable_password'))
+ else:
+ msg = "username, password and enable_password must be passed toether for creating CLI credentials"
+ self.discovery_specific_cred_failure(msg=msg)
+ new_object_params['userNameList'] = username_list
+ new_object_params['passwordList'] = password_list
+ new_object_params['enablePasswordList'] = enable_password_list
+
+ if http_read_credential:
+ if not (http_read_credential.get('password') and isinstance(http_read_credential.get('password'), str)):
+ msg = "The password for the HTTP read credential must be of string type."
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (http_read_credential.get('username') and isinstance(http_read_credential.get('username'), str)):
+ msg = "The username for the HTTP read credential must be of string type."
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (http_read_credential.get('port') and isinstance(http_read_credential.get('port'), int)):
+ msg = "The port for the HTTP read Credential must be of integer type."
+ self.discovery_specific_cred_failure(msg=msg)
+ if not isinstance(http_read_credential.get('secure'), bool):
+ msg = "Secure for HTTP read Credential must be of type boolean."
+ self.discovery_specific_cred_failure(msg=msg)
+ new_object_params['httpReadCredential'] = http_read_credential
+
+ if http_write_credential:
+ if not (http_write_credential.get('password') and isinstance(http_write_credential.get('password'), str)):
+ msg = "The password for the HTTP write credential must be of string type."
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (http_write_credential.get('username') and isinstance(http_write_credential.get('username'), str)):
+ msg = "The username for the HTTP write credential must be of string type."
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (http_write_credential.get('port') and isinstance(http_write_credential.get('port'), int)):
+ msg = "The port for the HTTP write Credential must be of integer type."
+ self.discovery_specific_cred_failure(msg=msg)
+ if not isinstance(http_write_credential.get('secure'), bool):
+ msg = "Secure for HTTP write Credential must be of type boolean."
+ self.discovery_specific_cred_failure(msg=msg)
+ new_object_params['httpWriteCredential'] = http_write_credential
+
+ if snmp_v2_read_credential:
+ if not (snmp_v2_read_credential.get('desc')) and isinstance(snmp_v2_read_credential.get('desc'), str):
+ msg = "Name/description for the SNMP v2 read credential must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (snmp_v2_read_credential.get('community')) and isinstance(snmp_v2_read_credential.get('community'), str):
+ msg = "The community string must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ new_object_params['snmpROCommunityDesc'] = snmp_v2_read_credential.get('desc')
+ new_object_params['snmpROCommunity'] = snmp_v2_read_credential.get('community')
+ new_object_params['snmpVersion'] = "v2"
+
+ if snmp_v2_write_credential:
+ if not (snmp_v2_write_credential.get('desc')) and isinstance(snmp_v2_write_credential.get('desc'), str):
+ msg = "Name/description for the SNMP v2 write credential must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (snmp_v2_write_credential.get('community')) and isinstance(snmp_v2_write_credential.get('community'), str):
+ msg = "The community string must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ new_object_params['snmpRWCommunityDesc'] = snmp_v2_write_credential.get('desc')
+ new_object_params['snmpRWCommunity'] = snmp_v2_write_credential.get('community')
+ new_object_params['snmpVersion'] = "v2"
+
+ if snmp_v3_credential:
+ if not (snmp_v3_credential.get('username')) and isinstance(snmp_v3_credential.get('username'), str):
+ msg = "Username of SNMP v3 protocol must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (snmp_v3_credential.get('snmp_mode')) and isinstance(snmp_v3_credential.get('snmp_mode'), str):
+ msg = "Mode of SNMP is madantory to use SNMPv3 protocol and must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ if (snmp_v3_credential.get('snmp_mode')) == "AUTHPRIV" or snmp_v3_credential.get('snmp_mode') == "AUTHNOPRIV":
+ if not (snmp_v3_credential.get('auth_password')) and isinstance(snmp_v3_credential.get('auth_password'), str):
+ msg = "Authorization password must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (snmp_v3_credential.get('auth_type')) and isinstance(snmp_v3_credential.get('auth_type'), str):
+ msg = "Authorization type must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ if snmp_v3_credential.get('snmp_mode') == "AUTHPRIV":
+ if not (snmp_v3_credential.get('privacy_type')) and isinstance(snmp_v3_credential.get('privacy_type'), str):
+ msg = "Privacy type must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (snmp_v3_credential.get('privacy_password')) and isinstance(snmp_v3_credential.get('privacy_password'), str):
+ msg = "Privacy password must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ new_object_params['snmpUserName'] = snmp_v3_credential.get('username')
+ new_object_params['snmpMode'] = snmp_v3_credential.get('snmp_mode')
+ new_object_params['snmpAuthPassphrase'] = snmp_v3_credential.get('auth_password')
+ new_object_params['snmpAuthProtocol'] = snmp_v3_credential.get('auth_type')
+ new_object_params['snmpPrivProtocol'] = snmp_v3_credential.get('privacy_type')
+ new_object_params['snmpPrivPassphrase'] = snmp_v3_credential.get('privacy_password')
+ new_object_params['snmpVersion'] = "v3"
+
+ if net_conf_port:
+ new_object_params['netconfPort'] = str(net_conf_port)
+
+ return new_object_params
+
    def create_params(self, ip_address_list=None):
        """
        Build the payload for the 'start_discovery' API from the validated
        configuration, the discovery specific credentials (if any), and the
        global credential IDs (when 'use_global_credentials' is True).

        Parameters:
            - ip_address_list: The preprocessed IP address value to include
              in the parameters. If not provided, None is used.

        Returns:
            - new_object_params: A dictionary containing the newly created
              parameters.

        Fails the module when neither a discovery specific nor a global
        SNMP credential (or CLI credential) is available.
        """

        credential_ids = []

        new_object_params = {}
        new_object_params['cdpLevel'] = self.validated_config[0].get('cdp_level')
        new_object_params['discoveryType'] = self.validated_config[0].get('discovery_type')
        new_object_params['ipAddressList'] = ip_address_list
        new_object_params['ipFilterList'] = self.validated_config[0].get('ip_filter_list')
        new_object_params['lldpLevel'] = self.validated_config[0].get('lldp_level')
        new_object_params['name'] = self.validated_config[0].get('discovery_name')
        new_object_params['preferredMgmtIPMethod'] = self.validated_config[0].get('preferred_mgmt_ip_method')
        new_object_params['protocolOrder'] = self.validated_config[0].get('protocol_order')
        new_object_params['retry'] = self.validated_config[0].get('retry')
        new_object_params['timeout'] = self.validated_config[0].get('timeout')

        if self.validated_config[0].get('discovery_specific_credentials'):
            self.handle_discovery_specific_credentials(new_object_params=new_object_params)

        global_cred_flag = self.validated_config[0].get('use_global_credentials')
        global_credentials_all = {}

        if global_cred_flag is True:
            global_credentials_all = self.get_ccc_global_credentials_v2_info()
            for global_cred_list in global_credentials_all.values():
                credential_ids.extend(global_cred_list)
            new_object_params['globalCredentialIdList'] = credential_ids

        self.log("All the global credentials used for the discovery task are {0}".format(str(global_credentials_all)), "DEBUG")

        # At least one SNMP and one CLI credential (specific or global) is
        # required for the discovery to be able to reach devices.
        if not (new_object_params.get('snmpUserName') or new_object_params.get('snmpROCommunityDesc') or new_object_params.get('snmpRWCommunityDesc')
                or global_credentials_all.get('snmpV2cRead') or global_credentials_all.get('snmpV2cWrite') or global_credentials_all.get('snmpV3')):
            msg = "Please provide atleast one valid SNMP credential to perform Discovery"
            self.discovery_specific_cred_failure(msg=msg)

        if not (new_object_params.get('userNameList') or global_credentials_all.get('cliCredential')):
            msg = "Please provide atleast one valid CLI credential to perform Discovery"
            self.discovery_specific_cred_failure(msg=msg)

        self.log("The payload/object created for calling the start discovery API is {0}".format(str(new_object_params)), "INFO")

        return new_object_params
+
+ def create_discovery(self, ip_address_list=None):
+ """
+ Start a new discovery process in the Cisco Catalyst Center. It creates the
+ parameters required for the discovery and then calls the
+ 'start_discovery' function. The result of the discovery process
+ is added to the 'result' attribute.
+
+ Parameters:
+ - credential_ids: The list of credential IDs to include in the
+ discovery. If not provided, an empty list is used.
+ - ip_address_list: The list of IP addresses to include in the
+ discovery. If not provided, None is used.
+
+ Returns:
+ - task_id: The ID of the task created for the discovery process.
+ """
+
+ result = self.dnac_apply['exec'](
+ family="discovery",
+ function="start_discovery",
+ params=self.create_params(ip_address_list=ip_address_list),
+ op_modifies=True,
+ )
+
+ self.log("The response received post discovery creation API called is {0}".format(str(result)), "DEBUG")
+
+ self.result.update(dict(discovery_result=result))
+ self.log("Task Id of the API task created is {0}".format(result.response.get('taskId')), "INFO")
+ return result.response.get('taskId')
+
+ def get_task_status(self, task_id=None):
+ """
+ Monitor the status of a task in the Cisco Catalyst Center. It checks the task
+ status periodically until the task is no longer 'In Progress'.
+ If the task encounters an error or fails, it immediately fails the
+ module and returns False.
+
+ Parameters:
+ - task_id: The ID of the task to monitor.
+
+ Returns:
+ - result: True if the task completed successfully, False otherwise.
+ """
+
+ result = False
+ params = dict(task_id=task_id)
+ while True:
+ response = self.dnac_apply['exec'](
+ family="task",
+ function='get_task_by_id',
+ params=params,
+ )
+ response = response.response
+ self.log("Task status for the task id {0} is {1}".format(str(task_id), str(response)), "INFO")
+ if response.get('isError') or re.search(
+ 'failed', response.get('progress'), flags=re.IGNORECASE
+ ):
+ msg = 'Discovery task with id {0} has not completed - Reason: {1}'.format(
+ task_id, response.get("failureReason"))
+ self.log(msg, "CRITICAL")
+ self.module.fail_json(msg=msg)
+ return False
+ self.log("Task status for the task id (before checking status) {0} is {1}".format(str(task_id), str(response)), "INFO")
+ progress = response.get('progress')
+ if progress in ('In Progress', 'Inventory service initiating discovery'):
+ time.sleep(3)
+ continue
+ else:
+ result = True
+ self.log("The Process is completed", "INFO")
+ break
+ self.result.update(dict(discovery_task=response))
+ return result
+
    def lookup_discovery_by_range_via_name(self):
        """
        Retrieve the discovery whose name matches the configured
        'discovery_name' by paging through 'get_discoveries_by_range'.

        Returns:
            - discovery: The first discovery record whose 'name' equals the
              configured discovery name, or None when no match is found.
        """
        start_index = self.validated_config[0].get("start_index")
        records_to_return = self.validated_config[0].get("records_to_return")

        response = {"response": []}
        if records_to_return > 500:
            # Page through the API in chunks of 500 records.
            # NOTE(review): this branch ignores the configured 'start_index'
            # and always pages from record 1 -- confirm this is intended.
            num_intervals = records_to_return // 500
            for num in range(0, num_intervals + 1):
                params = dict(
                    start_index=1 + num * 500,
                    records_to_return=500,
                    headers=self.validated_config[0].get("headers")
                )
                response_part = self.dnac_apply['exec'](
                    family="discovery",
                    function='get_discoveries_by_range',
                    params=params
                )
                response["response"].extend(response_part["response"])
        else:
            params = dict(
                start_index=self.validated_config[0].get("start_index"),
                records_to_return=self.validated_config[0].get("records_to_return"),
                headers=self.validated_config[0].get("headers"),
            )

            response = self.dnac_apply['exec'](
                family="discovery",
                function='get_discoveries_by_range',
                params=params
            )
        self.log("Response of the get discoveries via range API is {0}".format(str(response)), "DEBUG")

        # Return the first record with a matching name, or None.
        return next(
            filter(
                lambda x: x['name'] == self.validated_config[0].get('discovery_name'),
                response.get("response")
            ), None
        )
+
+ def get_discoveries_by_range_until_success(self):
+ """
+ Continuously retrieve a specific discovery by name from a range of
+ discoveries in the Cisco Catalyst Center until the discovery is complete.
+
+ Returns:
+ - discovery: The completed discovery with the specified name from
+ the range of discoveries. If the discovery is not
+ found or not completed, the function fails the module
+ and returns None.
+ """
+
+ result = False
+ discovery = self.lookup_discovery_by_range_via_name()
+
+ if not discovery:
+ msg = 'Cannot find any discovery task with name {0} -- Discovery result: {1}'.format(
+ str(self.validated_config[0].get("discovery_name")), str(discovery))
+ self.log(msg, "INFO")
+ self.module.fail_json(msg=msg)
+
+ while True:
+ discovery = self.lookup_discovery_by_range_via_name()
+ if discovery.get('discoveryCondition') == 'Complete':
+ result = True
+ break
+
+ time.sleep(3)
+
+ if not result:
+ msg = 'Cannot find any discovery task with name {0} -- Discovery result: {1}'.format(
+ str(self.validated_config[0].get("discovery_name")), str(discovery))
+ self.log(msg, "CRITICAL")
+ self.module.fail_json(msg=msg)
+
+ self.result.update(dict(discovery_range=discovery))
+ return discovery
+
    def get_discovery_device_info(self, discovery_id=None, task_id=None):
        """
        Retrieve the devices found by a discovery process and classify their
        reachability. The device list is stored in the module result under
        'discovery_device_info'.

        Parameters:
            - discovery_id: ID of the discovery process to retrieve devices from.
            - task_id: ID of the task associated with the discovery process.

        Returns:
            - result: True when the reachability status could be classified
              (all, some, or none of the devices reachable); the module
              fails otherwise.
        """

        params = dict(
            id=discovery_id,
            task_id=task_id,
            headers=self.validated_config[0].get("headers"),
        )
        result = False
        count = 0
        while True:
            response = self.dnac_apply['exec'](
                family="discovery",
                function='get_discovered_network_devices_by_discovery_id',
                params=params,
            )
            devices = response.response

            self.log("Retrieved device details using the API 'get_discovered_network_devices_by_discovery_id': {0}".format(str(devices)), "DEBUG")
            # NOTE(review): the three branches below (all / any / none
            # reachable) cover every possible device list, so the loop
            # always breaks on the first poll and the retry counter further
            # down is effectively unreachable -- confirm whether the retry
            # behaviour is still wanted.
            if all(res.get('reachabilityStatus') == 'Success' for res in devices):
                result = True
                self.log("All devices in the range are reachable", "INFO")
                break

            elif any(res.get('reachabilityStatus') == 'Success' for res in devices):
                result = True
                self.log("Some devices in the range are reachable", "INFO")
                break

            elif all(res.get('reachabilityStatus') != 'Success' for res in devices):
                result = True
                self.log("All devices are not reachable, but discovery is completed", "WARNING")
                break

            count += 1
            if count == 3:
                break

            time.sleep(3)

        if not result:
            msg = 'Discovery network device with id {0} has not completed'.format(discovery_id)
            self.log(msg, "CRITICAL")
            self.module.fail_json(msg=msg)

        self.log('Discovery network device with id {0} got completed'.format(discovery_id), "INFO")
        self.result.update(dict(discovery_device_info=devices))
        return result
+
+ def get_exist_discovery(self):
+ """
+ Retrieve an existing discovery by its name from a range of discoveries.
+
+ Returns:
+ - discovery: The discovery with the specified name from the range of
+ discoveries. If no matching discovery is found, it
+ returns None and updates the 'exist_discovery' entry in
+ the result dictionary to None.
+ """
+ discovery = self.lookup_discovery_by_range_via_name()
+ if not discovery:
+ self.result.update(dict(exist_discovery=discovery))
+ return None
+
+ have = dict(exist_discovery=discovery)
+ self.have = have
+ self.result.update(dict(exist_discovery=discovery))
+ return discovery
+
+ def delete_exist_discovery(self, params):
+ """
+ Delete an existing discovery in the Cisco Catalyst Center by its ID.
+
+ Parameters:
+ - params: A dictionary containing the parameters for the delete
+ operation, including the ID of the discovery to delete.
+
+ Returns:
+ - task_id: The ID of the task created for the delete operation.
+ """
+
+ response = self.dnac_apply['exec'](
+ family="discovery",
+ function="delete_discovery_by_id",
+ params=params,
+ )
+
+ self.log("Response collected from API 'delete_discovery_by_id': {0}".format(str(response)), "DEBUG")
+ self.result.update(dict(delete_discovery=response))
+ self.log("Task Id of the deletion task is {0}".format(response.response.get('taskId')), "INFO")
+ return response.response.get('taskId')
+
+ def get_diff_merged(self):
+ """
+ Retrieve the information of devices discovered by a specific discovery
+ process in the Cisco Catalyst Center, delete existing discoveries if they exist,
+ and create a new discovery. The function also updates various
+ attributes of the class instance.
+
+ Returns:
+ - self: The instance of the class with updated attributes.
+ """
+
+ devices_list_info = self.get_devices_list_info()
+ ip_address_list = self.preprocess_device_discovery(devices_list_info)
+ exist_discovery = self.get_exist_discovery()
+ if exist_discovery:
+ params = dict(id=exist_discovery.get('id'))
+ discovery_task_id = self.delete_exist_discovery(params=params)
+ complete_discovery = self.get_task_status(task_id=discovery_task_id)
+
+ discovery_task_id = self.create_discovery(
+ ip_address_list=ip_address_list)
+ complete_discovery = self.get_task_status(task_id=discovery_task_id)
+ discovery_task_info = self.get_discoveries_by_range_until_success()
+ result = self.get_discovery_device_info(discovery_id=discovery_task_info.get('id'))
+ self.result["changed"] = True
+ self.result['msg'] = "Discovery Created Successfully"
+ self.result['diff'] = self.validated_config
+ self.result['response'] = discovery_task_id
+ self.result.update(dict(msg='Discovery Created Successfully'))
+ self.log(self.result['msg'], "INFO")
+ return self
+
    def get_diff_deleted(self):
        """
        Apply the 'deleted' state: either delete every discovery
        ('delete_all' set) or delete the single discovery matching the
        configured name. Updates the module result accordingly; when the
        named discovery does not exist, only a message is set.

        Returns:
            - self: The instance of the class with updated attributes.
        """

        if self.validated_config[0].get("delete_all"):
            count_discoveries = self.dnac_apply['exec'](
                family="discovery",
                function="get_count_of_all_discovery_jobs",
            )
            if count_discoveries.get("response") == 0:
                msg = "There are no discoveries present in the Discovery Dashboard for deletion"
                self.result['msg'] = msg
                self.log(msg, "WARNING")
                self.result['response'] = self.validated_config[0]
                return self

            delete_all_response = self.dnac_apply['exec'](
                family="discovery",
                function="delete_all_discovery",
            )
            # NOTE(review): unlike the single-delete branch below, this path
            # does not poll the deletion task to completion and does not set
            # result['response'] -- confirm this asymmetry is intended.
            discovery_task_id = delete_all_response.get('response').get('taskId')
            self.result["changed"] = True
            self.result['msg'] = "All of the Discoveries Deleted Successfully"
            self.result['diff'] = self.validated_config

        else:
            exist_discovery = self.get_exist_discovery()
            if not exist_discovery:
                self.result['msg'] = "Discovery {0} Not Found".format(
                    self.validated_config[0].get("discovery_name"))
                self.log(self.result['msg'], "ERROR")
                return self

            params = dict(id=exist_discovery.get('id'))
            discovery_task_id = self.delete_exist_discovery(params=params)
            complete_discovery = self.get_task_status(task_id=discovery_task_id)
            self.result["changed"] = True
            self.result['msg'] = "Successfully deleted discovery"
            self.result['diff'] = self.validated_config
            self.result['response'] = discovery_task_id

        self.log(self.result['msg'], "INFO")
        return self
+
+ def verify_diff_merged(self, config):
+ """
+ Verify the merged status(Creation/Updation) of Discovery in Cisco Catalyst Center.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the merged status of a configuration in Cisco Catalyst Center by
+ retrieving the current state (have) and desired state (want) of the configuration,
+ logs the states, and validates whether the specified device(s) exists in the DNA
+ Center configuration's Discovery Database.
+ """
+
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(config)), "INFO")
+ # Code to validate Cisco Catalyst Center config for merged state
+ discovery_task_info = self.get_discoveries_by_range_until_success()
+ discovery_id = discovery_task_info.get('id')
+ params = dict(
+ id=discovery_id
+ )
+ response = self.dnac_apply['exec'](
+ family="discovery",
+ function='get_discovery_by_id',
+ params=params
+ )
+ discovery_name = config.get('discovery_name')
+ if response:
+ self.log("Requested Discovery with name {0} is completed".format(discovery_name), "INFO")
+
+ else:
+ self.log("Requested Discovery with name {0} is not completed".format(discovery_name), "WARNING")
+ self.status = "success"
+
+ return self
+
+ def verify_diff_deleted(self, config):
+ """
+ Verify the deletion status of Discovery in Cisco Catalyst Center.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the deletion status of a configuration in Cisco Catalyst Center.
+ It validates whether the specified discovery(s) exists in the Cisco Catalyst Center configuration's
+ Discovery Database.
+ """
+
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(config)), "INFO")
+ # Code to validate Cisco Catalyst Center config for deleted state
+ if config.get("delete_all") is True:
+ count_discoveries = self.dnac_apply['exec'](
+ family="discovery",
+ function="get_count_of_all_discovery_jobs",
+ )
+ if count_discoveries == 0:
+ self.log("All discoveries are deleted", "INFO")
+ else:
+ self.log("All discoveries are not deleted", "WARNING")
+ self.status = "success"
+ return self
+
+ discovery_task_info = self.lookup_discovery_by_range_via_name()
+ discovery_name = config.get('discovery_name')
+ if discovery_task_info:
+ self.log("Requested Discovery with name {0} is present".format(discovery_name), "WARNING")
+
+ else:
+ self.log("Requested Discovery with name {0} is not present and deleted".format(discovery_name), "INFO")
+ self.status = "success"
+
+ return self
+
+
def main():
    """Module entry point: build the AnsibleModule, validate the input, and
    apply (and optionally verify) the requested state for every item of the
    validated configuration."""

    element_spec = {
        'dnac_host': {'required': True, 'type': 'str'},
        'dnac_port': {'type': 'str', 'default': '443'},
        'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
        'dnac_password': {'type': 'str', 'no_log': True},
        'dnac_verify': {'type': 'bool', 'default': 'True'},
        'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
        'dnac_debug': {'type': 'bool', 'default': False},
        'dnac_log': {'type': 'bool', 'default': False},
        'dnac_log_level': {'type': 'str', 'default': 'WARNING'},
        'dnac_log_file_path': {'type': 'str', 'default': 'dnac.log'},
        'dnac_log_append': {'type': 'bool', 'default': True},
        'validate_response_schema': {'type': 'bool', 'default': True},
        'config_verify': {'type': 'bool', 'default': False},
        'dnac_api_task_timeout': {'type': 'int', 'default': 1200},
        'dnac_task_poll_interval': {'type': 'int', 'default': 2},
        'config': {'required': True, 'type': 'list', 'elements': 'dict'},
        'state': {'default': 'merged', 'choices': ['merged', 'deleted']},
    }

    module = AnsibleModule(argument_spec=element_spec,
                           supports_check_mode=False)

    ccc_discovery = Discovery(module)
    config_verify = ccc_discovery.params.get("config_verify")

    # Reject unsupported states up front.
    state = ccc_discovery.params.get("state")
    if state not in ccc_discovery.supported_states:
        ccc_discovery.status = "invalid"
        ccc_discovery.msg = "State {0} is invalid".format(state)
        ccc_discovery.check_return_status()

    ccc_discovery.validate_input(state=state).check_return_status()
    for config in ccc_discovery.validated_config:
        ccc_discovery.reset_values()
        ccc_discovery.get_diff_state_apply[state]().check_return_status()
        if config_verify:
            ccc_discovery.verify_diff_state_apply[state](config).check_return_status()

    module.exit_json(**ccc_discovery.result)
+
+
# Standard script entry guard: run the module when executed directly.
if __name__ == '__main__':
    main()
diff --git a/ansible_collections/cisco/dnac/plugins/modules/discovery_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/discovery_workflow_manager.py
new file mode 100644
index 000000000..88ce124a3
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/discovery_workflow_manager.py
@@ -0,0 +1,1713 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2024, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ("Abinash Mishra, Phan Nguyen, Madhan Sankaranarayanan")
+
+DOCUMENTATION = r"""
+---
+module: discovery_workflow_manager
+short_description: A resource module for handling device discovery tasks.
+description:
+- Manages device discovery using IP address, address range, CDP, and LLDP, including deletion of discovered devices.
+- API to discover a device or multiple devices
+- API to delete a discovery of a device or multiple devices
+version_added: '6.6.0'
+extends_documentation_fragment:
+ - cisco.dnac.workflow_manager_params
+author: Abinash Mishra (@abimishr)
+ Phan Nguyen (@phannguy)
+ Madhan Sankaranarayanan (@madhansansel)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Cisco Catalyst Center after module completion.
+ type: str
+ choices: [ merged, deleted ]
+ default: merged
+ config:
+ description:
+ - List of details of device being managed.
+ type: list
+ elements: dict
+ required: true
+ suboptions:
+ discovery_name:
+ description: Name of the discovery task
+ type: str
+ required: true
+ discovery_type:
+ description: Determines the method of device discovery. Here are the available options.
+ - SINGLE discovers a single device using a single IP address.
+ - RANGE discovers multiple devices within a single IP address range.
+ - MULTI RANGE discovers devices across multiple IP address ranges.
+ - CDP uses Cisco Discovery Protocol to discover devices in subsequent layers of the given IP address.
+ - LLDP uses Link Layer Discovery Protocol to discover devices in subsequent layers of the specified IP address.
+ - CIDR discovers devices based on subnet filtering using Classless Inter-Domain Routing.
+ type: str
+ required: true
+ choices: [ 'SINGLE', 'RANGE', 'MULTI RANGE', 'CDP', 'LLDP', 'CIDR']
+ ip_address_list:
+ description: List of IP addresses to be discovered. For CDP/LLDP/SINGLE based discovery, we should
+ pass a list with single element like - 10.197.156.22. For CIDR based discovery, we should pass a list with
+ single element like - 10.197.156.22/22. For RANGE based discovery, we should pass a list with single element
+ and range like - 10.197.156.1-10.197.156.100. For MULTI RANGE based discovery, we should pass a list with multiple
+ elements like - 10.197.156.1-10.197.156.100 and in next line - 10.197.157.1-10.197.157.100.
+ type: list
+ elements: str
+ required: true
+ ip_filter_list:
+ description: List of IP addresses that need to be filtered out from the IP addresses passed.
+ type: list
+ elements: str
+ cdp_level:
+ description: Total number of levels that are there in cdp's method of discovery
+ type: int
+ default: 16
+ lldp_level:
+ description: Total number of levels that are there in lldp's method of discovery
+ type: int
+ default: 16
+ preferred_mgmt_ip_method:
+ description: Preferred method for the management of the IP (None/UseLoopBack)
+ type: str
+ default: None
+ use_global_credentials:
+ description:
+ - Determines if device discovery should utilize pre-configured global credentials.
+ - Setting to True employs the predefined global credentials for discovery tasks. This is the default setting.
+ - Setting to False requires manually provided, device-specific credentials for discovery, as global credentials will be bypassed.
+ type: bool
+ default: True
+ discovery_specific_credentials:
+ description: Credentials specifically created by the user for performing device discovery.
+ type: dict
+ suboptions:
+ cli_credentials_list:
+ description: List of CLI credentials to be used during device discovery.
+ type: list
+ elements: dict
+ suboptions:
+ username:
+ description: Username for CLI authentication, mandatory when using CLI credentials.
+ type: str
+ password:
+ description: Password for CLI authentication, mandatory when using CLI credential.
+ type: str
+ enable_password:
+ description: Enable password for CLI authentication, mandatory when using CLI credential.
+ type: str
+ http_read_credential:
+ description: HTTP read credential is used for authentication purposes and specifically utilized to
+ grant read-only access to certain resources from the device.
+ type: dict
+ suboptions:
+ username:
+ description: Username for HTTP(S) Read authentication, mandatory when using HTTP credentials.
+ type: str
+ password:
+ description: Password for HTTP(S) Read authentication, mandatory when using HTTP credentials.
+ type: str
+ port:
+ description: Port for HTTP(S) Read authentication, mandatory for using HTTP credentials.
+ type: int
+ secure:
+ description: Flag for HTTP(S) Read authentication, not mandatory when using HTTP credentials.
+ type: bool
+ http_write_credential:
+ description: HTTP write credential is used for authentication purposes and grants Cisco Catalyst Center the
+ ability to alter configurations, update software, or perform other modifications on a network device.
+ type: dict
+ suboptions:
+ username:
+ description: Username for HTTP(S) Write authentication, mandatory when using HTTP credentials.
+ type: str
+ password:
+ description: Password for HTTP(S) Write authentication, mandatory when using HTTP credentials.
+ type: str
+ port:
+ description: Port for HTTP(S) Write authentication, mandatory when using HTTP credentials.
+ type: int
+ secure:
+ description: Flag for HTTP(S) Write authentication, not mandatory when using HTTP credentials.
+ type: bool
+ snmp_v2_read_credential:
+ description:
+ - The SNMP v2 credentials to be created and used for contacting a device via SNMP protocol in read mode.
+ - SNMP v2 also delivers data encryptions, but it uses data types.
+ type: dict
+ suboptions:
+ desc:
+ description: Name/Description of the SNMP read credential to be used for creation of snmp_v2_read_credential.
+ type: str
+ community:
+ description: SNMP V2 Read community string enables Cisco Catalyst Center to extract read-only data from device.
+ type: str
+ snmp_v2_write_credential:
+ description:
+ - The SNMP v2 credentials to be created and used for contacting a device via SNMP protocol in read and write mode.
+ - SNMP v2 also delivers data encryptions, but it uses data types.
+ type: dict
+ suboptions:
+ desc:
+ description: Name/Description of the SNMP write credential to be used for creation of snmp_v2_write_credential.
+ type: str
+ community:
+ description: SNMP V2 Write community string is used to extract data and alter device configurations.
+ type: str
+ snmp_v3_credential:
+ description:
+ - The SNMP v3 credentials to be created and used for contacting a device via SNMP protocol in read and write mode.
+ - SNMPv3 is the most secure version of SNMP, allowing users to fully encrypt transmissions, keeping us safe from external attackers.
+ type: dict
+ suboptions:
+ username:
+ description: Username of the SNMP v3 protocol to be used.
+ type: str
+ snmp_mode:
+ description:
+ - Mode of SNMP which determines the encryption level of our community string.
+ - AUTHPRIV mode uses both Authentication and Encryption.
+ - AUTHNOPRIV mode uses Authentication but no Encryption.
+ - NOAUTHNOPRIV mode does not use either Authentication or Encryption.
+ type: str
+ choices: [ 'AUTHPRIV', 'AUTHNOPRIV', 'NOAUTHNOPRIV' ]
+ auth_password:
+ description:
+ - Authentication Password of the SNMP v3 protocol to be used.
+ - Must be of length greater than 7 characters.
+ - Not required for NOAUTHNOPRIV snmp_mode.
+ type: str
+ auth_type:
+ description:
+ - Authentication type of the SNMP v3 protocol to be used.
+ - SHA uses Secure Hash Algorithm (SHA) as your authentication protocol.
+ - MD5 uses Message Digest 5 (MD5) as your authentication protocol and is not recommended.
+ - Not required for NOAUTHNOPRIV snmp_mode.
+ type: str
+ choices: [ 'SHA', 'MD5' ]
+ privacy_type:
+ description:
+ - Privacy type/protocol of the SNMP v3 protocol to be used in AUTHPRIV SNMP mode
+ - Not required for AUTHNOPRIV and NOAUTHNOPRIV snmp_mode.
+ type: str
+ choices: [ 'AES128', 'AES192', 'AES256' ]
+ privacy_password:
+ description:
+ - Privacy password of the SNMP v3 protocol to be used in AUTHPRIV SNMP mode
+ - Not required for AUTHNOPRIV and NOAUTHNOPRIV snmp_mode.
+ type: str
+ net_conf_port:
+ description:
+ - To be used when network contains IOS XE-based wireless controllers.
+ - This is used for discovery and the enabling of wireless services on the controllers.
+ - Requires valid SSH credentials to work.
+ - Avoid standard ports like 22, 80, and 8080.
+ type: str
+ global_credentials:
+ description:
+ - Set of various credential types, including CLI, SNMP, HTTP, and NETCONF, that a user has pre-configured in
+ the Device Credentials section of the Cisco Catalyst Center.
+ - If user doesn't pass any global credentials in the playbook, then by default, we will use all the global
+ credentials present in the Cisco Catalyst Center of each type for performing discovery. (Max 5 allowed)
+ type: dict
+ version_added: 6.12.0
+ suboptions:
+ cli_credentials_list:
+ description:
+ - Accepts a list of global CLI credentials for use in device discovery.
+ - It's recommended to create device credentials with both a unique username and a clear description.
+ type: list
+ elements: dict
+ suboptions:
+ username:
+ description: Username required for CLI authentication and is mandatory when using global CLI credentials.
+ type: str
+ description:
+ description: Name of the CLI credential, mandatory when using global CLI credentials.
+ type: str
+ http_read_credential_list:
+ description:
+ - List of global HTTP Read credentials that will be used in the process of discovering devices.
+ - It's recommended to create device credentials with both a unique username and a clear description for easy identification.
+ type: list
+ elements: dict
+ suboptions:
+ username:
+ description: Username for HTTP Read authentication, mandatory when using global HTTP credentials.
+ type: str
+ description:
+ description: Name of the HTTP Read credential, mandatory when using global HTTP credentials.
+ type: str
+ http_write_credential_list:
+ description:
+ - List of global HTTP Write credentials that will be used in the process of discovering devices.
+ - It's recommended to create device credentials with both a unique username and a clear description for easy identification.
+ type: list
+ elements: dict
+ suboptions:
+ username:
+ description: Username for HTTP Write authentication, mandatory when using global HTTP credentials.
+ type: str
+ description:
+ description: Name of the HTTP Write credential, mandatory when using global HTTP credentials.
+ type: str
+ snmp_v2_read_credential_list:
+ description:
+ - List of Global SNMP V2 Read credentials to be used during device discovery.
+ - It's recommended to create device credentials with a clear description for easy identification.
+ type: list
+ elements: dict
+ suboptions:
+ description:
+ description: Name of the SNMP Read credential, mandatory when using global SNMP credentials.
+ type: str
+ snmp_v2_write_credential_list:
+ description:
+ - List of Global SNMP V2 Write credentials to be used during device discovery.
+ - It's recommended to create device credentials with a clear description for easy identification.
+ type: list
+ elements: dict
+ suboptions:
+ description:
+ description: Name of the SNMP Write credential, mandatory when using global SNMP credentials.
+ type: str
+ snmp_v3_credential_list:
+ description:
+ - List of Global SNMP V3 credentials to be used during device discovery, giving read and write mode.
+ - It's recommended to create device credentials with both a unique username and a clear description for easy identification.
+ type: list
+ elements: dict
+ suboptions:
+ username:
+ description: Username for SNMP V3 authentication, mandatory when using global SNMP credentials.
+ type: str
+ description:
+ description: Name of the SNMP V3 credential, mandatory when using global SNMP credentials.
+ type: str
+ net_conf_port_list:
+ description:
+ - List of Global Net conf ports to be used during device discovery.
+ - It's recommended to create device credentials with unique description.
+ type: list
+ elements: dict
+ suboptions:
+ description:
+ description: Name of the Net Conf Port credential, mandatory when using global Net conf port.
+ type: str
+ start_index:
+ description: Start index for the header in fetching SNMP v2 credentials
+ type: int
+ default: 1
+ records_to_return:
+ description: Number of records to return for the header in fetching global v2 credentials
+ type: int
+ default: 100
+ protocol_order:
+ description: Determines the order in which device connections will be attempted. Here are the options
+ - "telnet" Only telnet connections will be tried.
+ - "ssh, telnet" SSH (Secure Shell) will be attempted first, followed by telnet if SSH fails.
+ type: str
+ default: ssh
+ retry:
+ description: Number of times to try establishing connection to device
+ type: int
+ timeout:
+ description: Time to wait for device response in seconds
+ type: int
+ delete_all:
+ description: Parameter to delete all the discoveries at one go
+ type: bool
+ default: False
+requirements:
+- dnacentersdk == 2.6.10
+- python >= 3.5
+notes:
+ - SDK Method used are
+ discovery.Discovery.get_all_global_credentials_v2,
+ discovery.Discovery.start_discovery,
+ task.Task.get_task_by_id,
+ discovery.Discovery.get_discoveries_by_range,
+ discovery.Discovery.get_discovered_network_devices_by_discovery_id',
+ discovery.Discovery.delete_discovery_by_id
+ discovery.Discovery.delete_all_discovery
+ discovery.Discovery.get_count_of_all_discovery_jobs
+
+ - Paths used are
+ get /dna/intent/api/v2/global-credential
+ post /dna/intent/api/v1/discovery
+ get /dna/intent/api/v1/task/{taskId}
+ get /dna/intent/api/v1/discovery/{startIndex}/{recordsToReturn}
+ get /dna/intent/api/v1/discovery/{id}/network-device
+ delete /dna/intent/api/v1/discovery/{id}
+ delete /dna/intent/api/v1/delete
+ get /dna/intent/api/v1/discovery/count
+
+ - Removed 'global_cli_len' option in v6.12.0.
+
+"""
+
+EXAMPLES = r"""
+- name: Execute discovery of devices with both global credentials and discovery specific credentials
+ cisco.dnac.discovery_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: Discovery with both global and job specific credentials
+ discovery_type: RANGE
+ ip_address_list:
+ - 201.1.1.1-201.1.1.100
+ ip_filter_list:
+ - 201.1.1.2
+ - 201.1.1.10
+ discovery_specific_credentials:
+ cli_credentials_list:
+ - username: cisco
+ password: Cisco123
+ enable_password: Cisco123
+ http_read_credential:
+ username: cisco
+ password: Cisco123
+ port: 443
+ secure: true
+ http_write_credential:
+ username: cisco
+ password: Cisco123
+ port: 443
+ secure: True
+ snmp_v2_read_credential:
+ desc: snmp_v2-new
+ community: Cisco123
+ snmp_v2_write_credential:
+ desc: snmp_v2-new
+ community: Cisco123
+ snmp_v3_credential:
+ username: v3Public2
+ snmp_mode: AUTHPRIV
+ auth_type: SHA
+ auth_password: Lablab123
+ privacy_type: AES256
+ privacy_password: Lablab123
+ net_conf_port: 750
+ global_credentials:
+ cli_credentials_list:
+ - description: ISE
+ username: cisco
+ - description: CLI1234
+ username: cli
+ http_read_credential_list:
+ - description: HTTP Read
+ username: HTTP_Read
+ http_write_credential_list:
+ - description: HTTP Write
+ username: HTTP_Write
+ snmp_v3_credential_list:
+ - description: snmpV3
+ username: snmpV3
+ snmp_v2_read_credential_list:
+ - description: snmpV2_read
+ snmp_v2_write_credential_list:
+ - description: snmpV2_write
+ net_conf_port_list:
+ - description: Old_one
+ start_index: 1
+ records_to_return: 100
+ protocol_order: ssh
+ retry: 5
+ timeout: 3
+
+- name: Execute discovery of devices with discovery specific credentials only
+ cisco.dnac.discovery_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: Single with discovery specific credentials only
+ discovery_type: SINGLE
+ ip_address_list:
+ - 204.1.1.10
+ discovery_specific_credentials:
+ cli_credentials_list:
+ - username: cisco
+ password: Cisco123
+ enable_password: Cisco123
+ http_read_credential:
+ username: cisco
+ password: Cisco123
+ port: 443
+ secure: true
+ http_write_credential:
+ username: cisco
+ password: Cisco123
+ port: 443
+ secure: True
+ snmp_v2_read_credential:
+ desc: snmp_v2-new
+ community: Cisco123
+ snmp_v2_write_credential:
+ desc: snmp_v2-new
+ community: Cisco123
+ snmp_v3_credential:
+ username: v3Public2
+ snmp_mode: AUTHPRIV
+ auth_type: SHA
+ auth_password: Lablab123
+ privacy_type: AES256
+ privacy_password: Lablab123
+ net_conf_port: 750
+ use_global_credentials: False
+ start_index: 1
+ records_to_return: 100
+ protocol_order: ssh
+ retry: 5
+ timeout: 3
+
+- name: Execute discovery of devices with global credentials only
+ cisco.dnac.discovery_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: CDP with global credentials only
+ discovery_type: CDP
+ ip_address_list:
+ - 204.1.1.1
+ cdp_level: 16
+ global_credentials:
+ cli_credentials_list:
+ - description: ISE
+ username: cisco
+ - description: CLI1234
+ username: cli
+ http_read_credential_list:
+ - description: HTTP Read
+ username: HTTP_Read
+ http_write_credential_list:
+ - description: HTTP Write
+ username: HTTP_Write
+ snmp_v3_credential_list:
+ - description: snmpV3
+ username: snmpV3
+ snmp_v2_read_credential_list:
+ - description: snmpV2_read
+ snmp_v2_write_credential_list:
+ - description: snmpV2_write
+ net_conf_port_list:
+ - description: Old_one
+ start_index: 1
+ records_to_return: 100
+ protocol_order: ssh
+ retry: 5
+ timeout: 3
+
+- name: Execute discovery of devices with all the global credentials (max 5 allowed)
+ cisco.dnac.discovery_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: merged
+ config_verify: True
+ config:
+ - discovery_name: CIDR with all global credentials
+ discovery_type: CIDR
+ ip_address_list:
+ - 204.1.2.0/24
+ ip_filter_list:
+ - 204.1.2.10
+ preferred_mgmt_ip_method: None
+ start_index: 1
+ records_to_return: 100
+ protocol_order: telnet
+ retry: 10
+ timeout: 3
+ use_global_credentials: True
+
+- name: Delete discovery by name
+ cisco.dnac.discovery_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: deleted
+ config_verify: True
+ config:
+ - discovery_name: Single discovery
+"""
+
+RETURN = r"""
+#Case_1: When the device(s) are discovered successfully.
+response_1:
+ description: A dictionary with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response":
+ {
+ "response": String,
+ "version": String
+ },
+ "msg": String
+ }
+
+#Case_2: Given device details or SNMP mode are not provided
+response_2:
+ description: A list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: list
+ sample: >
+ {
+ "response": [],
+ "msg": String
+ }
+
+#Case_3: Error while deleting a discovery
+response_3:
+ description: A string with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": String,
+ "msg": String
+ }
+"""
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts
+)
+import time
+import re
+
+
+class Discovery(DnacBase):
+ def __init__(self, module):
+ """
+ Initialize an instance of the class. It also initializes an empty
+ list for 'creds_ids_list' attribute.
+
+ Parameters:
+ - module: The module associated with the class instance.
+
+ Returns:
+ The method does not return a value. Instead, it initializes the
+ following instance attributes:
+ - self.creds_ids_list: An empty list that will be used to store
+ credentials IDs.
+ """
+
+ super().__init__(module)
+ self.creds_ids_list = []
+
+ def validate_input(self, state=None):
+ """
+ Validate the fields provided in the playbook. Checks the
+ configuration provided in the playbook against a predefined
+ specification to ensure it adheres to the expected structure
+ and data types.
+
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of the
+ 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call
+ 'validate_input' on it.If the validation succeeds, 'self.status'
+ will be 'success'and 'self.validated_config' will contain the
+ validated configuration. If it fails, 'self.status' will be
+ 'failed', and 'self.msg' will describe the validation issues.
+ """
+
+ if not self.config:
+ self.msg = "config not available in playbook for validation"
+ self.status = "success"
+ return self
+
+ discovery_spec = {
+ 'cdp_level': {'type': 'int', 'required': False,
+ 'default': 16},
+ 'start_index': {'type': 'int', 'required': False,
+ 'default': 1},
+ 'records_to_return': {'type': 'int', 'required': False,
+ 'default': 100},
+ 'discovery_specific_credentials': {'type': 'dict', 'required': False},
+ 'ip_filter_list': {'type': 'list', 'required': False,
+ 'elements': 'str'},
+ 'lldp_level': {'type': 'int', 'required': False,
+ 'default': 16},
+ 'discovery_name': {'type': 'str', 'required': True},
+ 'netconf_port': {'type': 'str', 'required': False},
+ 'preferred_mgmt_ip_method': {'type': 'str', 'required': False,
+ 'default': 'None'},
+ 'retry': {'type': 'int', 'required': False},
+ 'timeout': {'type': 'str', 'required': False},
+ 'global_credentials': {'type': 'dict', 'required': False},
+ 'protocol_order': {'type': 'str', 'required': False, 'default': 'ssh'},
+ 'use_global_credentials': {'type': 'bool', 'required': False, 'default': True}
+ }
+
+ if state == "merged":
+ discovery_spec["ip_address_list"] = {'type': 'list', 'required': True,
+ 'elements': 'str'}
+ discovery_spec["discovery_type"] = {'type': 'str', 'required': True}
+
+ elif state == "deleted":
+ if self.config[0].get("delete_all") is True:
+ self.validated_config = [{"delete_all": True}]
+ self.msg = "Sucessfully collected input for deletion of all the discoveries"
+ self.log(self.msg, "WARNING")
+ return self
+
+ # Validate discovery params
+ valid_discovery, invalid_params = validate_list_of_dicts(
+ self.config, discovery_spec
+ )
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format(
+ "\n".join(invalid_params))
+ self.log(str(self.msg), "ERROR")
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_discovery
+ self.msg = "Successfully validated playbook configuration parameters using 'validate_input': {0}".format(str(valid_discovery))
+ self.log(str(self.msg), "INFO")
+ self.status = "success"
+ return self
+
+ def get_creds_ids_list(self):
+ """
+ Retrieve the list of credentials IDs associated with class instance.
+
+ Returns:
+ The method returns the list of credentials IDs:
+ - self.creds_ids_list: The list of credentials IDs associated with
+ the class instance.
+ """
+
+ self.log("Credential Ids list passed is {0}".format(str(self.creds_ids_list)), "INFO")
+ return self.creds_ids_list
+
+ def handle_global_credentials(self, response=None):
+ """
+ Method to convert values for create_params API when global paramters
+ are passed as input.
+
+ Parameters:
+ - response: The response collected from the get_all_global_credentials_v2 API
+
+ Returns:
+ - global_credentials_all : The dictionary containing list of IDs of various types of
+ Global credentials.
+ """
+
+ global_credentials = self.validated_config[0].get("global_credentials")
+ global_credentials_all = {}
+
+ cli_credentials_list = global_credentials.get('cli_credentials_list')
+ if cli_credentials_list:
+ if not isinstance(cli_credentials_list, list):
+ msg = "Global CLI credentials must be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if response.get("cliCredential") is None:
+ msg = "Global CLI credentials are not present in the Cisco Catalyst Center"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(cli_credentials_list) > 0:
+ global_credentials_all["cliCredential"] = []
+ cred_len = len(cli_credentials_list)
+ if cred_len > 5:
+ cred_len = 5
+ for cli_cred in cli_credentials_list:
+ if cli_cred.get('description') and cli_cred.get('username'):
+ for cli in response.get("cliCredential"):
+ if cli.get("description") == cli_cred.get('description') and cli.get("username") == cli_cred.get('username'):
+ global_credentials_all["cliCredential"].append(cli.get("id"))
+ global_credentials_all["cliCredential"] = global_credentials_all["cliCredential"][:cred_len]
+ else:
+ msg = "Kindly ensure you include both the description and the username for the Global CLI credential to discover the devices"
+ self.discovery_specific_cred_failure(msg=msg)
+
+ http_read_credential_list = global_credentials.get('http_read_credential_list')
+ if http_read_credential_list:
+ if not isinstance(http_read_credential_list, list):
+ msg = "Global HTTP read credentials must be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if response.get("httpsRead") is None:
+ msg = "Global HTTP read credentials are not present in the Cisco Catalyst Center"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(http_read_credential_list) > 0:
+ global_credentials_all["httpsRead"] = []
+ cred_len = len(http_read_credential_list)
+ if cred_len > 5:
+ cred_len = 5
+ for http_cred in http_read_credential_list:
+ if http_cred.get('description') and http_cred.get('username'):
+ for http in response.get("httpsRead"):
+ if http.get("description") == http.get('description') and http.get("username") == http.get('username'):
+ global_credentials_all["httpsRead"].append(http.get("id"))
+ global_credentials_all["httpsRead"] = global_credentials_all["httpsRead"][:cred_len]
+ else:
+ msg = "Kindly ensure you include both the description and the username for the Global HTTP Read credential to discover the devices"
+ self.discovery_specific_cred_failure(msg=msg)
+
+ http_write_credential_list = global_credentials.get('http_write_credential_list')
+ if http_write_credential_list:
+ if not isinstance(http_write_credential_list, list):
+ msg = "Global HTTP write credentials must be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if response.get("httpsWrite") is None:
+ msg = "Global HTTP write credentials are not present in the Cisco Catalyst Center"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(http_write_credential_list) > 0:
+ global_credentials_all["httpsWrite"] = []
+ cred_len = len(http_write_credential_list)
+ if cred_len > 5:
+ cred_len = 5
+ for http_cred in http_write_credential_list:
+ if http_cred.get('description') and http_cred.get('username'):
+ for http in response.get("httpsWrite"):
+ if http.get("description") == http.get('description') and http.get("username") == http.get('username'):
+ global_credentials_all["httpsWrite"].append(http.get("id"))
+ global_credentials_all["httpsWrite"] = global_credentials_all["httpsWrite"][:cred_len]
+ else:
+ msg = "Kindly ensure you include both the description and the username for the Global HTTP Write credential to discover the devices"
+ self.discovery_specific_cred_failure(msg=msg)
+
+ snmp_v2_read_credential_list = global_credentials.get('snmp_v2_read_credential_list')
+ if snmp_v2_read_credential_list:
+ if not isinstance(snmp_v2_read_credential_list, list):
+ msg = "Global SNMPv2 read credentials must be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if response.get("snmpV2cRead") is None:
+ msg = "Global SNMPv2 read credentials are not present in the Cisco Catalyst Center"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(snmp_v2_read_credential_list) > 0:
+ global_credentials_all["snmpV2cRead"] = []
+ cred_len = len(snmp_v2_read_credential_list)
+ if cred_len > 5:
+ cred_len = 5
+ for snmp_cred in snmp_v2_read_credential_list:
+ if snmp_cred.get('description'):
+ for snmp in response.get("snmpV2cRead"):
+ if snmp.get("description") == snmp_cred.get('description'):
+ global_credentials_all["snmpV2cRead"].append(snmp.get("id"))
+ global_credentials_all["snmpV2cRead"] = global_credentials_all["snmpV2cRead"][:cred_len]
+ else:
+ msg = "Kindly ensure you include the description for the Global SNMPv2 Read \
+ credential to discover the devices"
+ self.discovery_specific_cred_failure(msg=msg)
+
+ snmp_v2_write_credential_list = global_credentials.get('snmp_v2_write_credential_list')
+ if snmp_v2_write_credential_list:
+ if not isinstance(snmp_v2_write_credential_list, list):
+ msg = "Global SNMPv2 write credentials must be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if response.get("snmpV2cWrite") is None:
+ msg = "Global SNMPv2 write credentials are not present in the Cisco Catalyst Center"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(snmp_v2_write_credential_list) > 0:
+ global_credentials_all["snmpV2cWrite"] = []
+ cred_len = len(snmp_v2_write_credential_list)
+ if cred_len > 5:
+ cred_len = 5
+ for snmp_cred in snmp_v2_write_credential_list:
+ if snmp_cred.get('description'):
+ for snmp in response.get("snmpV2cWrite"):
+ if snmp.get("description") == snmp_cred.get('description'):
+ global_credentials_all["snmpV2cWrite"].append(snmp.get("id"))
+ global_credentials_all["snmpV2cWrite"] = global_credentials_all["snmpV2cWrite"][:cred_len]
+ else:
+ msg = "Kindly ensure you include the description for the Global SNMPV2 write credential to discover the devices"
+ self.discovery_specific_cred_failure(msg=msg)
+
+ snmp_v3_credential_list = global_credentials.get('snmp_v3_credential_list')
+ if snmp_v3_credential_list:
+ if not isinstance(snmp_v3_credential_list, list):
+ msg = "Global SNMPv3 write credentials must be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if response.get("snmpV3") is None:
+ msg = "Global SNMPv3 credentials are not present in the Cisco Catalyst Center"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(snmp_v3_credential_list) > 0:
+ global_credentials_all["snmpV3"] = []
+ cred_len = len(snmp_v3_credential_list)
+ if cred_len > 5:
+ cred_len = 5
+ for snmp_cred in snmp_v3_credential_list:
+ if snmp_cred.get('description') and snmp_cred.get('username'):
+ for snmp in response.get("snmpV3"):
+ if snmp.get("description") == snmp_cred.get('description') and snmp.get("username") == snmp_cred.get('username'):
+ global_credentials_all["snmpV3"].append(snmp.get("id"))
+ global_credentials_all["snmpV3"] = global_credentials_all["snmpV3"][:cred_len]
+ else:
+ msg = "Kindly ensure you include both the description and the username for the Global SNMPv3 \
+ to discover the devices"
+ self.discovery_specific_cred_failure(msg=msg)
+
+ net_conf_port_list = global_credentials.get('net_conf_port_list')
+ if net_conf_port_list:
+ if not isinstance(net_conf_port_list, list):
+ msg = "Global net Conf Ports be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if response.get("netconfCredential") is None:
+ msg = "Global netconf ports are not present in the Cisco Catalyst Center"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(net_conf_port_list) > 0:
+ global_credentials_all["netconfCredential"] = []
+ cred_len = len(net_conf_port_list)
+ if cred_len > 5:
+ cred_len = 5
+ for port in net_conf_port_list:
+ if port.get("description"):
+ for netconf in response.get("netconfCredential"):
+ if port.get('description') == netconf.get('description'):
+ global_credentials_all["netconfCredential"].append(netconf.get("id"))
+ global_credentials_all["netconfCredential"] = global_credentials_all["netconfCredential"][:cred_len]
+ else:
+ msg = "Please provide valid description of the Global Netconf port to be used"
+ self.discovery_specific_cred_failure(msg=msg)
+
+ self.log("Fetched Global credentials IDs are {0}".format(global_credentials_all), "INFO")
+ return global_credentials_all
+
+    def get_ccc_global_credentials_v2_info(self):
+        """
+        Retrieve the global credentials information (version 2) from the
+        Cisco Catalyst Center via the 'get_all_global_credentials_v2' API.
+
+        Credential types the user explicitly filtered through the
+        'global_credentials' configuration are resolved first by
+        'handle_global_credentials'; every remaining credential type in the
+        API response contributes up to 5 credential IDs per type (the cap
+        applied throughout this module for the start discovery API).
+
+        Returns:
+            - global_credentials_all: A dictionary mapping credential type
+              (e.g. 'cliCredential', 'snmpV3') to the list of credential
+              IDs to use for the discovery. Empty when no global
+              credentials are found (a warning is logged in that case).
+        """
+
+        response = self.dnac_apply['exec'](
+            family="discovery",
+            function='get_all_global_credentials_v2',
+            params=self.validated_config[0].get('headers'),
+        )
+        response = response.get('response')
+        self.log("The Global credentials response from 'get all global credentials v2' API is {0}".format(str(response)), "DEBUG")
+        global_credentials_all = {}
+        global_credentials = self.validated_config[0].get("global_credentials")
+        if global_credentials:
+            # Resolve only the credential types/descriptions the user asked for.
+            global_credentials_all = self.handle_global_credentials(response=response)
+
+        # For credential types not covered above, fall back to every ID the
+        # Catalyst Center returned, capped at 5 per type.
+        global_cred_set = set(global_credentials_all.keys())
+        response_cred_set = set(response.keys())
+        diff_keys = response_cred_set.difference(global_cred_set)
+
+        for key in diff_keys:
+            global_credentials_all[key] = []
+            if response[key] is None:
+                response[key] = []
+            total_len = len(response[key])
+            if total_len > 5:
+                total_len = 5
+            for element in response.get(key):
+                global_credentials_all[key].append(element.get('id'))
+            global_credentials_all[key] = global_credentials_all[key][:total_len]
+
+        if global_credentials_all == {}:
+            msg = 'Not found any global credentials to perform discovery'
+            self.log(msg, "WARNING")
+
+        return global_credentials_all
+
+ def get_devices_list_info(self):
+ """
+ Retrieve the list of devices from the validated configuration.
+ It then updates the result attribute with this list.
+
+ Returns:
+ - ip_address_list: The list of devices extracted from the
+ 'validated_config' attribute.
+ """
+ ip_address_list = self.validated_config[0].get('ip_address_list')
+ self.result.update(dict(devices_info=ip_address_list))
+ self.log("Details of the device list passed: {0}".format(str(ip_address_list)), "INFO")
+ return ip_address_list
+
+    def preprocess_device_discovery(self, ip_address_list=None):
+        """
+        Preprocess the devices' information. Extract the IP addresses from
+        the list of devices and perform additional processing based on the
+        'discovery_type' in the validated configuration.
+
+        Parameters:
+            - ip_address_list: The list of devices' IP addresses intended for preprocessing.
+                               If not provided, an empty list will be used.
+
+        Returns:
+            - ip_address_list: It returns IP address list for the API to process. The value passed
+                               for single, CDP, LLDP, CIDR, Range and Multi Range varies depending
+                               on the need. Always returned as a string.
+        """
+
+        if ip_address_list is None:
+            ip_address_list = []
+        discovery_type = self.validated_config[0].get('discovery_type')
+        self.log("Discovery type passed for the discovery is {0}".format(discovery_type), "INFO")
+        if discovery_type in ["SINGLE", "CDP", "LLDP"]:
+            # These discovery types take exactly one seed IP address.
+            if len(ip_address_list) == 1:
+                ip_address_list = ip_address_list[0]
+            else:
+                self.preprocess_device_discovery_handle_error()
+        elif discovery_type == "CIDR":
+            # One CIDR entry; a missing prefix length defaults to /30.
+            if len(ip_address_list) == 1:
+                cidr_notation = ip_address_list[0]
+                if len(cidr_notation.split("/")) == 2:
+                    ip_address_list = cidr_notation
+                else:
+                    ip_address_list = "{0}/30".format(cidr_notation)
+                    self.log("CIDR notation is being used for discovery and it requires a prefix length to be specified, such as 1.1.1.1/24.\
+                        As no prefix length was provided, it will default to 30.", "WARNING")
+            else:
+                self.preprocess_device_discovery_handle_error()
+        elif discovery_type == "RANGE":
+            # One "start-end" range; a lone IP is expanded to "ip-ip".
+            if len(ip_address_list) == 1:
+                if len(str(ip_address_list[0]).split("-")) == 2:
+                    ip_address_list = ip_address_list[0]
+                else:
+                    ip_address_list = "{0}-{1}".format(ip_address_list[0], ip_address_list[0])
+            else:
+                self.preprocess_device_discovery_handle_error()
+        else:
+            # Multi-range: normalise each entry to "start-end" and join with commas.
+            new_ip_collected = []
+            for ip in ip_address_list:
+                if len(str(ip).split("-")) != 2:
+                    ip_collected = "{0}-{0}".format(ip)
+                    new_ip_collected.append(ip_collected)
+                else:
+                    new_ip_collected.append(ip)
+            ip_address_list = ','.join(new_ip_collected)
+        self.log("Collected IP address/addresses are {0}".format(str(ip_address_list)), "INFO")
+        return str(ip_address_list)
+
+ def preprocess_device_discovery_handle_error(self):
+ """
+ Method for failing discovery based on the length of list of IP Addresses passed
+ for performing discovery.
+ """
+
+ self.log("IP Address list's length is longer than 1", "ERROR")
+ self.module.fail_json(msg="IP Address list's length is longer than 1", response=[])
+
+    def discovery_specific_cred_failure(self, msg=None):
+        """
+        Fail the module run when the credentials passed by the user for the
+        discovery are inconsistent or incomplete.
+
+        Parameters:
+            - msg: The failure message to log (at CRITICAL level) and to
+              return to the caller via fail_json.
+        """
+
+        self.log(msg, "CRITICAL")
+        self.module.fail_json(msg=msg)
+
+ def handle_discovery_specific_credentials(self, new_object_params=None):
+ """
+ Method to convert values for create_params API when discovery specific paramters
+ are passed as input.
+
+ Parameters:
+ - new_object_params: The dictionary storing various parameters for calling the
+ start discovery API
+
+ Returns:
+ - new_object_params: The dictionary storing various parameters for calling the
+ start discovery API in an updated fashion
+ """
+
+ discovery_specific_credentials = self.validated_config[0].get('discovery_specific_credentials')
+ cli_credentials_list = discovery_specific_credentials.get('cli_credentials_list')
+ http_read_credential = discovery_specific_credentials.get('http_read_credential')
+ http_write_credential = discovery_specific_credentials.get('http_write_credential')
+ snmp_v2_read_credential = discovery_specific_credentials.get('snmp_v2_read_credential')
+ snmp_v2_write_credential = discovery_specific_credentials.get('snmp_v2_write_credential')
+ snmp_v3_credential = discovery_specific_credentials.get('snmp_v3_credential')
+ net_conf_port = discovery_specific_credentials.get('net_conf_port')
+
+ if cli_credentials_list:
+ if not isinstance(cli_credentials_list, list):
+ msg = "Device Specific ClI credentials must be passed as a list"
+ self.discovery_specific_cred_failure(msg=msg)
+ if len(cli_credentials_list) > 0:
+ username_list = []
+ password_list = []
+ enable_password_list = []
+ for cli_cred in cli_credentials_list:
+ if cli_cred.get('username') and cli_cred.get('password') and cli_cred.get('enable_password'):
+ username_list.append(cli_cred.get('username'))
+ password_list.append(cli_cred.get('password'))
+ enable_password_list.append(cli_cred.get('enable_password'))
+ else:
+ msg = "username, password and enable_password must be passed toether for creating CLI credentials"
+ self.discovery_specific_cred_failure(msg=msg)
+ new_object_params['userNameList'] = username_list
+ new_object_params['passwordList'] = password_list
+ new_object_params['enablePasswordList'] = enable_password_list
+
+ if http_read_credential:
+ if not (http_read_credential.get('password') and isinstance(http_read_credential.get('password'), str)):
+ msg = "The password for the HTTP read credential must be of string type."
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (http_read_credential.get('username') and isinstance(http_read_credential.get('username'), str)):
+ msg = "The username for the HTTP read credential must be of string type."
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (http_read_credential.get('port') and isinstance(http_read_credential.get('port'), int)):
+ msg = "The port for the HTTP read Credential must be of integer type."
+ self.discovery_specific_cred_failure(msg=msg)
+ if not isinstance(http_read_credential.get('secure'), bool):
+ msg = "Secure for HTTP read Credential must be of type boolean."
+ self.discovery_specific_cred_failure(msg=msg)
+ new_object_params['httpReadCredential'] = http_read_credential
+
+ if http_write_credential:
+ if not (http_write_credential.get('password') and isinstance(http_write_credential.get('password'), str)):
+ msg = "The password for the HTTP write credential must be of string type."
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (http_write_credential.get('username') and isinstance(http_write_credential.get('username'), str)):
+ msg = "The username for the HTTP write credential must be of string type."
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (http_write_credential.get('port') and isinstance(http_write_credential.get('port'), int)):
+ msg = "The port for the HTTP write Credential must be of integer type."
+ self.discovery_specific_cred_failure(msg=msg)
+ if not isinstance(http_write_credential.get('secure'), bool):
+ msg = "Secure for HTTP write Credential must be of type boolean."
+ self.discovery_specific_cred_failure(msg=msg)
+ new_object_params['httpWriteCredential'] = http_write_credential
+
+ if snmp_v2_read_credential:
+ if not (snmp_v2_read_credential.get('desc')) and isinstance(snmp_v2_read_credential.get('desc'), str):
+ msg = "Name/description for the SNMP v2 read credential must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (snmp_v2_read_credential.get('community')) and isinstance(snmp_v2_read_credential.get('community'), str):
+ msg = "The community string must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ new_object_params['snmpROCommunityDesc'] = snmp_v2_read_credential.get('desc')
+ new_object_params['snmpROCommunity'] = snmp_v2_read_credential.get('community')
+ new_object_params['snmpVersion'] = "v2"
+
+ if snmp_v2_write_credential:
+ if not (snmp_v2_write_credential.get('desc')) and isinstance(snmp_v2_write_credential.get('desc'), str):
+ msg = "Name/description for the SNMP v2 write credential must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (snmp_v2_write_credential.get('community')) and isinstance(snmp_v2_write_credential.get('community'), str):
+ msg = "The community string must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ new_object_params['snmpRWCommunityDesc'] = snmp_v2_write_credential.get('desc')
+ new_object_params['snmpRWCommunity'] = snmp_v2_write_credential.get('community')
+ new_object_params['snmpVersion'] = "v2"
+
+ if snmp_v3_credential:
+ if not (snmp_v3_credential.get('username')) and isinstance(snmp_v3_credential.get('username'), str):
+ msg = "Username of SNMP v3 protocol must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (snmp_v3_credential.get('snmp_mode')) and isinstance(snmp_v3_credential.get('snmp_mode'), str):
+ msg = "Mode of SNMP is madantory to use SNMPv3 protocol and must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ if (snmp_v3_credential.get('snmp_mode')) == "AUTHPRIV" or snmp_v3_credential.get('snmp_mode') == "AUTHNOPRIV":
+ if not (snmp_v3_credential.get('auth_password')) and isinstance(snmp_v3_credential.get('auth_password'), str):
+ msg = "Authorization password must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (snmp_v3_credential.get('auth_type')) and isinstance(snmp_v3_credential.get('auth_type'), str):
+ msg = "Authorization type must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ if snmp_v3_credential.get('snmp_mode') == "AUTHPRIV":
+ if not (snmp_v3_credential.get('privacy_type')) and isinstance(snmp_v3_credential.get('privacy_type'), str):
+ msg = "Privacy type must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ if not (snmp_v3_credential.get('privacy_password')) and isinstance(snmp_v3_credential.get('privacy_password'), str):
+ msg = "Privacy password must be of string type"
+ self.discovery_specific_cred_failure(msg=msg)
+ new_object_params['snmpUserName'] = snmp_v3_credential.get('username')
+ new_object_params['snmpMode'] = snmp_v3_credential.get('snmp_mode')
+ new_object_params['snmpAuthPassphrase'] = snmp_v3_credential.get('auth_password')
+ new_object_params['snmpAuthProtocol'] = snmp_v3_credential.get('auth_type')
+ new_object_params['snmpPrivProtocol'] = snmp_v3_credential.get('privacy_type')
+ new_object_params['snmpPrivPassphrase'] = snmp_v3_credential.get('privacy_password')
+ new_object_params['snmpVersion'] = "v3"
+
+ if net_conf_port:
+ new_object_params['netconfPort'] = str(net_conf_port)
+
+ return new_object_params
+
+    def create_params(self, ip_address_list=None):
+        """
+        Create the parameter object for the start discovery API based on
+        the validated configuration and the given IP address list.
+
+        Parameters:
+            - ip_address_list: The preprocessed IP address string to include
+              in the parameters. If not provided, None is used.
+
+        Returns:
+            - new_object_params: A dictionary containing the newly created
+              parameters.
+        """
+
+        credential_ids = []
+
+        new_object_params = {}
+        new_object_params['cdpLevel'] = self.validated_config[0].get('cdp_level')
+        new_object_params['discoveryType'] = self.validated_config[0].get('discovery_type')
+        new_object_params['ipAddressList'] = ip_address_list
+        new_object_params['ipFilterList'] = self.validated_config[0].get('ip_filter_list')
+        new_object_params['lldpLevel'] = self.validated_config[0].get('lldp_level')
+        new_object_params['name'] = self.validated_config[0].get('discovery_name')
+        new_object_params['preferredMgmtIPMethod'] = self.validated_config[0].get('preferred_mgmt_ip_method')
+        new_object_params['protocolOrder'] = self.validated_config[0].get('protocol_order')
+        new_object_params['retry'] = self.validated_config[0].get('retry')
+        new_object_params['timeout'] = self.validated_config[0].get('timeout')
+
+        # Merge any discovery specific credentials into the payload in place.
+        if self.validated_config[0].get('discovery_specific_credentials'):
+            self.handle_discovery_specific_credentials(new_object_params=new_object_params)
+
+        global_cred_flag = self.validated_config[0].get('use_global_credentials')
+        global_credentials_all = {}
+
+        if global_cred_flag is True:
+            global_credentials_all = self.get_ccc_global_credentials_v2_info()
+            for global_cred_list in global_credentials_all.values():
+                credential_ids.extend(global_cred_list)
+            new_object_params['globalCredentialIdList'] = credential_ids
+
+        self.log("All the global credentials used for the discovery task are {0}".format(str(global_credentials_all)), "DEBUG")
+
+        # At least one SNMP credential and one CLI credential (discovery
+        # specific or global) are mandatory for a discovery to run.
+        if not (new_object_params.get('snmpUserName') or new_object_params.get('snmpROCommunityDesc') or new_object_params.get('snmpRWCommunityDesc')
+                or global_credentials_all.get('snmpV2cRead') or global_credentials_all.get('snmpV2cWrite') or global_credentials_all.get('snmpV3')):
+            msg = "Please provide atleast one valid SNMP credential to perform Discovery"
+            self.discovery_specific_cred_failure(msg=msg)
+
+        if not (new_object_params.get('userNameList') or global_credentials_all.get('cliCredential')):
+            msg = "Please provide atleast one valid CLI credential to perform Discovery"
+            self.discovery_specific_cred_failure(msg=msg)
+
+        self.log("The payload/object created for calling the start discovery API is {0}".format(str(new_object_params)), "INFO")
+
+        return new_object_params
+
+    def create_discovery(self, ip_address_list=None):
+        """
+        Start a new discovery process in the Cisco Catalyst Center. It creates the
+        parameters required for the discovery and then calls the
+        'start_discovery' function. The result of the discovery process
+        is added to the 'result' attribute.
+
+        Parameters:
+            - ip_address_list: The preprocessed IP address string to include
+              in the discovery. If not provided, None is used.
+
+        Returns:
+            - task_id: The ID of the task created for the discovery process.
+        """
+
+        result = self.dnac_apply['exec'](
+            family="discovery",
+            function="start_discovery",
+            params=self.create_params(ip_address_list=ip_address_list),
+            op_modifies=True,
+        )
+
+        self.log("The response received post discovery creation API called is {0}".format(str(result)), "DEBUG")
+
+        self.result.update(dict(discovery_result=result))
+        self.log("Task Id of the API task created is {0}".format(result.response.get('taskId')), "INFO")
+        return result.response.get('taskId')
+
+    def get_task_status(self, task_id=None):
+        """
+        Monitor the status of a task in the Cisco Catalyst Center. It checks the task
+        status every 3 seconds until the task is no longer 'In Progress'.
+        If the task encounters an error or fails, it immediately fails the
+        module and returns False.
+
+        NOTE(review): there is no upper bound on the polling loop; a task
+        stuck 'In Progress' would poll forever -- confirm this is intended.
+
+        Parameters:
+            - task_id: The ID of the task to monitor.
+
+        Returns:
+            - result: True if the task completed successfully, False otherwise.
+        """
+
+        result = False
+        params = dict(task_id=task_id)
+        while True:
+            response = self.dnac_apply['exec'](
+                family="task",
+                function='get_task_by_id',
+                params=params,
+            )
+            response = response.response
+            self.log("Task status for the task id {0} is {1}".format(str(task_id), str(response)), "INFO")
+            # Failure is signalled either by the explicit isError flag or by
+            # the word 'failed' appearing anywhere in the progress string.
+            if response.get('isError') or re.search(
+                'failed', response.get('progress'), flags=re.IGNORECASE
+            ):
+                msg = 'Discovery task with id {0} has not completed - Reason: {1}'.format(
+                    task_id, response.get("failureReason"))
+                self.log(msg, "CRITICAL")
+                self.module.fail_json(msg=msg)
+                return False
+            self.log("Task status for the task id (before checking status) {0} is {1}".format(str(task_id), str(response)), "INFO")
+            progress = response.get('progress')
+            if progress in ('In Progress', 'Inventory service initiating discovery'):
+                time.sleep(3)
+                continue
+            else:
+                result = True
+                self.log("The Process is completed", "INFO")
+                break
+        self.result.update(dict(discovery_task=response))
+        return result
+
+    def lookup_discovery_by_range_via_name(self):
+        """
+        Retrieve a specific discovery by name from a range of
+        discoveries in the Cisco Catalyst Center.
+
+        Returns:
+            - discovery: The discovery with the specified name from the range
+                         of discoveries. If no matching discovery is found, it
+                         returns None.
+        """
+        start_index = self.validated_config[0].get("start_index")
+        records_to_return = self.validated_config[0].get("records_to_return")
+
+        response = {"response": []}
+        if records_to_return > 500:
+            # The API caps a single call at 500 records, so page through in
+            # 500-record chunks.
+            # NOTE(review): pagination always starts at index 1 + num * 500,
+            # ignoring the configured 'start_index', and the final page may
+            # fetch up to 500 records beyond 'records_to_return' -- confirm
+            # this is intended.
+            num_intervals = records_to_return // 500
+            for num in range(0, num_intervals + 1):
+                params = dict(
+                    start_index=1 + num * 500,
+                    records_to_return=500,
+                    headers=self.validated_config[0].get("headers")
+                )
+                response_part = self.dnac_apply['exec'](
+                    family="discovery",
+                    function='get_discoveries_by_range',
+                    params=params
+                )
+                response["response"].extend(response_part["response"])
+        else:
+            params = dict(
+                start_index=self.validated_config[0].get("start_index"),
+                records_to_return=self.validated_config[0].get("records_to_return"),
+                headers=self.validated_config[0].get("headers"),
+            )
+
+            response = self.dnac_apply['exec'](
+                family="discovery",
+                function='get_discoveries_by_range',
+                params=params
+            )
+        self.log("Response of the get discoveries via range API is {0}".format(str(response)), "DEBUG")
+
+        # Return the first discovery whose name matches exactly, or None.
+        return next(
+            filter(
+                lambda x: x['name'] == self.validated_config[0].get('discovery_name'),
+                response.get("response")
+            ), None
+        )
+
+    def get_discoveries_by_range_until_success(self):
+        """
+        Continuously retrieve a specific discovery by name from a range of
+        discoveries in the Cisco Catalyst Center until the discovery is complete,
+        polling every 3 seconds.
+
+        Returns:
+            - discovery: The completed discovery with the specified name from
+                         the range of discoveries. If the discovery is not
+                         found, the function fails the module.
+        """
+
+        result = False
+        discovery = self.lookup_discovery_by_range_via_name()
+
+        if not discovery:
+            msg = 'Cannot find any discovery task with name {0} -- Discovery result: {1}'.format(
+                str(self.validated_config[0].get("discovery_name")), str(discovery))
+            self.log(msg, "INFO")
+            self.module.fail_json(msg=msg)
+
+        while True:
+            discovery = self.lookup_discovery_by_range_via_name()
+            if discovery.get('discoveryCondition') == 'Complete':
+                result = True
+                break
+
+            time.sleep(3)
+
+        # NOTE(review): the loop above only exits once discoveryCondition is
+        # 'Complete', so 'result' is always True here and this failure branch
+        # looks unreachable; there is also no polling timeout -- confirm.
+        if not result:
+            msg = 'Cannot find any discovery task with name {0} -- Discovery result: {1}'.format(
+                str(self.validated_config[0].get("discovery_name")), str(discovery))
+            self.log(msg, "CRITICAL")
+            self.module.fail_json(msg=msg)
+
+        self.result.update(dict(discovery_range=discovery))
+        return discovery
+
+    def get_discovery_device_info(self, discovery_id=None, task_id=None):
+        """
+        Retrieve the information of devices discovered by a specific discovery
+        process in the Cisco Catalyst Center and check their reachability
+        status, logging whether all, some, or none of the devices are
+        reachable.
+
+        Parameters:
+            - discovery_id: ID of the discovery process to retrieve devices from.
+            - task_id: ID of the task associated with the discovery process.
+
+        Returns:
+            - result: True once the reachability status has been evaluated.
+        """
+
+        params = dict(
+            id=discovery_id,
+            task_id=task_id,
+            headers=self.validated_config[0].get("headers"),
+        )
+        result = False
+        count = 0
+        while True:
+            response = self.dnac_apply['exec'](
+                family="discovery",
+                function='get_discovered_network_devices_by_discovery_id',
+                params=params,
+            )
+            devices = response.response
+
+            self.log("Retrieved device details using the API 'get_discovered_network_devices_by_discovery_id': {0}".format(str(devices)), "DEBUG")
+            # NOTE(review): the three branches below (all / any / none
+            # reachable) are collectively exhaustive for any device list, so
+            # the loop always breaks on the first iteration and the 3-attempt
+            # retry path below appears unreachable -- confirm intent.
+            if all(res.get('reachabilityStatus') == 'Success' for res in devices):
+                result = True
+                self.log("All devices in the range are reachable", "INFO")
+                break
+
+            elif any(res.get('reachabilityStatus') == 'Success' for res in devices):
+                result = True
+                self.log("Some devices in the range are reachable", "INFO")
+                break
+
+            elif all(res.get('reachabilityStatus') != 'Success' for res in devices):
+                result = True
+                self.log("All devices are not reachable, but discovery is completed", "WARNING")
+                break
+
+            count += 1
+            if count == 3:
+                break
+
+            time.sleep(3)
+
+        if not result:
+            msg = 'Discovery network device with id {0} has not completed'.format(discovery_id)
+            self.log(msg, "CRITICAL")
+            self.module.fail_json(msg=msg)
+
+        self.log('Discovery network device with id {0} got completed'.format(discovery_id), "INFO")
+        self.result.update(dict(discovery_device_info=devices))
+        return result
+
+ def get_exist_discovery(self):
+ """
+ Retrieve an existing discovery by its name from a range of discoveries.
+
+ Returns:
+ - discovery: The discovery with the specified name from the range of
+ discoveries. If no matching discovery is found, it
+ returns None and updates the 'exist_discovery' entry in
+ the result dictionary to None.
+ """
+ discovery = self.lookup_discovery_by_range_via_name()
+ if not discovery:
+ self.result.update(dict(exist_discovery=discovery))
+ return None
+
+ have = dict(exist_discovery=discovery)
+ self.have = have
+ self.result.update(dict(exist_discovery=discovery))
+ return discovery
+
+    def delete_exist_discovery(self, params):
+        """
+        Delete an existing discovery in the Cisco Catalyst Center by its ID.
+
+        Parameters:
+            - params: A dictionary containing the parameters for the delete
+                      operation, including the ID of the discovery to delete.
+
+        Returns:
+            - task_id: The ID of the task created for the delete operation.
+        """
+
+        response = self.dnac_apply['exec'](
+            family="discovery",
+            function="delete_discovery_by_id",
+            params=params,
+        )
+
+        self.log("Response collected from API 'delete_discovery_by_id': {0}".format(str(response)), "DEBUG")
+        self.result.update(dict(delete_discovery=response))
+        self.log("Task Id of the deletion task is {0}".format(response.response.get('taskId')), "INFO")
+        return response.response.get('taskId')
+
+    def get_diff_merged(self):
+        """
+        Create a discovery in the Cisco Catalyst Center: delete any existing
+        discovery with the same name, start a new one, wait for the task to
+        finish, and record the discovered device information. The function
+        also updates various attributes of the class instance.
+
+        Returns:
+            - self: The instance of the class with updated attributes.
+        """
+
+        devices_list_info = self.get_devices_list_info()
+        ip_address_list = self.preprocess_device_discovery(devices_list_info)
+        exist_discovery = self.get_exist_discovery()
+        # Re-creating a discovery requires deleting the existing one first.
+        if exist_discovery:
+            params = dict(id=exist_discovery.get('id'))
+            discovery_task_id = self.delete_exist_discovery(params=params)
+            complete_discovery = self.get_task_status(task_id=discovery_task_id)
+
+        discovery_task_id = self.create_discovery(
+            ip_address_list=ip_address_list)
+        complete_discovery = self.get_task_status(task_id=discovery_task_id)
+        discovery_task_info = self.get_discoveries_by_range_until_success()
+        result = self.get_discovery_device_info(discovery_id=discovery_task_info.get('id'))
+        self.result["changed"] = True
+        self.result['msg'] = "Discovery Created Successfully"
+        self.result['diff'] = self.validated_config
+        self.result['response'] = discovery_task_id
+        # NOTE(review): 'msg' was already set two lines above; this update is
+        # redundant -- kept as-is.
+        self.result.update(dict(msg='Discovery Created Successfully'))
+        self.log(self.result['msg'], "INFO")
+        return self
+
+    def get_diff_deleted(self):
+        """
+        Delete an existing discovery in the Cisco Catalyst Center by its name
+        (or delete all discoveries when 'delete_all' is set), and
+        updates various attributes of the class instance. If no
+        discovery with the specified name is found, the function
+        updates the 'msg' attribute with an appropriate message.
+
+        Returns:
+            - self: The instance of the class with updated attributes.
+        """
+
+        if self.validated_config[0].get("delete_all"):
+            count_discoveries = self.dnac_apply['exec'](
+                family="discovery",
+                function="get_count_of_all_discovery_jobs",
+            )
+            # Nothing to delete: report and return without marking a change.
+            if count_discoveries.get("response") == 0:
+                msg = "There are no discoveries present in the Discovery Dashboard for deletion"
+                self.result['msg'] = msg
+                self.log(msg, "WARNING")
+                self.result['response'] = self.validated_config[0]
+                return self
+
+            delete_all_response = self.dnac_apply['exec'](
+                family="discovery",
+                function="delete_all_discovery",
+            )
+            discovery_task_id = delete_all_response.get('response').get('taskId')
+            self.result["changed"] = True
+            self.result['msg'] = "All of the Discoveries Deleted Successfully"
+            self.result['diff'] = self.validated_config
+
+        else:
+            exist_discovery = self.get_exist_discovery()
+            if not exist_discovery:
+                self.result['msg'] = "Discovery {0} Not Found".format(
+                    self.validated_config[0].get("discovery_name"))
+                self.log(self.result['msg'], "ERROR")
+                return self
+
+            params = dict(id=exist_discovery.get('id'))
+            discovery_task_id = self.delete_exist_discovery(params=params)
+            complete_discovery = self.get_task_status(task_id=discovery_task_id)
+            self.result["changed"] = True
+            self.result['msg'] = "Successfully deleted discovery"
+            self.result['diff'] = self.validated_config
+            self.result['response'] = discovery_task_id
+
+        self.log(self.result['msg'], "INFO")
+        return self
+
+    def verify_diff_merged(self, config):
+        """
+        Verify the merged status(Creation/Updation) of Discovery in Cisco Catalyst Center.
+        Args:
+            - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+            - config (dict): The configuration details to be verified.
+        Return:
+            - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+        Description:
+            This method checks the merged status of a configuration in Cisco Catalyst Center by
+            retrieving the current state (have) and desired state (want) of the configuration,
+            logs the states, and validates whether the specified device(s) exists in the DNA
+            Center configuration's Discovery Database. 'status' is set to
+            "success" regardless of the lookup outcome; only the log level differs.
+        """
+
+        self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+        self.log("Desired State (want): {0}".format(str(config)), "INFO")
+        # Code to validate Cisco Catalyst Center config for merged state
+        discovery_task_info = self.get_discoveries_by_range_until_success()
+        discovery_id = discovery_task_info.get('id')
+        params = dict(
+            id=discovery_id
+        )
+        response = self.dnac_apply['exec'](
+            family="discovery",
+            function='get_discovery_by_id',
+            params=params
+        )
+        discovery_name = config.get('discovery_name')
+        # A truthy response means the discovery record exists in the Catalyst Center.
+        if response:
+            self.log("Requested Discovery with name {0} is completed".format(discovery_name), "INFO")
+
+        else:
+            self.log("Requested Discovery with name {0} is not completed".format(discovery_name), "WARNING")
+        self.status = "success"
+
+        return self
+
+ def verify_diff_deleted(self, config):
+ """
+ Verify the deletion status of Discovery in Cisco Catalyst Center.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the deletion status of a configuration in Cisco Catalyst Center.
+ It validates whether the specified discovery(s) exists in the Cisco Catalyst Center configuration's
+ Discovery Database.
+ """
+
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(config)), "INFO")
+ # Code to validate Cisco Catalyst Center config for deleted state
+ if config.get("delete_all") is True:
+ count_discoveries = self.dnac_apply['exec'](
+ family="discovery",
+ function="get_count_of_all_discovery_jobs",
+ )
+ if count_discoveries == 0:
+ self.log("All discoveries are deleted", "INFO")
+ else:
+ self.log("All discoveries are not deleted", "WARNING")
+ self.status = "success"
+ return self
+
+ discovery_task_info = self.lookup_discovery_by_range_via_name()
+ discovery_name = config.get('discovery_name')
+ if discovery_task_info:
+ self.log("Requested Discovery with name {0} is present".format(discovery_name), "WARNING")
+
+ else:
+ self.log("Requested Discovery with name {0} is not present and deleted".format(discovery_name), "INFO")
+ self.status = "success"
+
+ return self
+
+
+def main():
+    """ main entry point for module execution
+    """
+
+    # Connection/behaviour options shared by cisco.dnac modules, plus the
+    # per-run 'config' list and the desired 'state'.
+    element_spec = {'dnac_host': {'required': True, 'type': 'str'},
+                    'dnac_port': {'type': 'str', 'default': '443'},
+                    'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
+                    'dnac_password': {'type': 'str', 'no_log': True},
+                    'dnac_verify': {'type': 'bool', 'default': 'True'},
+                    'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
+                    'dnac_debug': {'type': 'bool', 'default': False},
+                    'dnac_log': {'type': 'bool', 'default': False},
+                    'dnac_log_level': {'type': 'str', 'default': 'WARNING'},
+                    "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
+                    "dnac_log_append": {"type": 'bool', "default": True},
+                    'validate_response_schema': {'type': 'bool', 'default': True},
+                    'config_verify': {"type": 'bool', "default": False},
+                    'dnac_api_task_timeout': {'type': 'int', "default": 1200},
+                    'dnac_task_poll_interval': {'type': 'int', "default": 2},
+                    'config': {'required': True, 'type': 'list', 'elements': 'dict'},
+                    'state': {'default': 'merged', 'choices': ['merged', 'deleted']}
+                    }
+
+    module = AnsibleModule(argument_spec=element_spec,
+                           supports_check_mode=False)
+
+    ccc_discovery = Discovery(module)
+    config_verify = ccc_discovery.params.get("config_verify")
+
+    state = ccc_discovery.params.get("state")
+    if state not in ccc_discovery.supported_states:
+        ccc_discovery.status = "invalid"
+        ccc_discovery.msg = "State {0} is invalid".format(state)
+        ccc_discovery.check_return_status()
+
+    ccc_discovery.validate_input(state=state).check_return_status()
+    # Apply (and optionally verify) the requested state for each config item.
+    for config in ccc_discovery.validated_config:
+        ccc_discovery.reset_values()
+        ccc_discovery.get_diff_state_apply[state]().check_return_status()
+        if config_verify:
+            ccc_discovery.verify_diff_state_apply[state](config).check_return_status()
+
+    module.exit_json(**ccc_discovery.result)
+
+
+# Invoke the module entry point only when executed directly by Ansible.
+if __name__ == '__main__':
+    main()
diff --git a/ansible_collections/cisco/dnac/plugins/modules/inventory_intent.py b/ansible_collections/cisco/dnac/plugins/modules/inventory_intent.py
new file mode 100644
index 000000000..675c11c91
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/inventory_intent.py
@@ -0,0 +1,3644 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2022, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ("Madhan Sankaranarayanan, Abhishek Maheshwari")
+
+DOCUMENTATION = r"""
+---
+module: inventory_intent
+short_description: Resource module for Network Device
+description:
+- Manage operations create, update and delete of the resource Network Device.
+- Adds the device with given credential.
+- Deletes the network device for the given Id.
+- Sync the devices provided as input.
+version_added: '6.8.0'
+extends_documentation_fragment:
+ - cisco.dnac.intent_params
+author: Abhishek Maheshwari (@abmahesh)
+ Madhan Sankaranarayanan (@madhansansel)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Cisco Catalyst Center after module completion.
+ type: str
+ choices: [ merged, deleted ]
+ default: merged
+ config:
+    description: List of devices with credentials to perform Add/Update/Delete/Resync operation
+ type: list
+ elements: dict
+ required: True
+ suboptions:
+ type:
+ description: Select Device's type from NETWORK_DEVICE, COMPUTE_DEVICE, MERAKI_DASHBOARD, THIRD_PARTY_DEVICE, FIREPOWER_MANAGEMENT_SYSTEM.
+ NETWORK_DEVICE - This refers to traditional networking equipment such as routers, switches, access points, and firewalls. These devices
+ are responsible for routing, switching, and providing connectivity within the network.
+ COMPUTE_DEVICE - These are computing resources such as servers, virtual machines, or containers that are part of the network infrastructure.
+ Cisco Catalyst Center can integrate with compute devices to provide visibility and management capabilities, ensuring that the network and
+ compute resources work together seamlessly to support applications and services.
+ MERAKI_DASHBOARD - It is cloud-based platform used to manage Meraki networking devices, including wireless access points, switches, security
+ appliances, and cameras.
+ THIRD_PARTY_DEVICE - This category encompasses devices from vendors other than Cisco or Meraki. Cisco Catalyst Center is designed to support
+ integration with third-party devices through open standards and APIs. This allows organizations to manage heterogeneous network
+ environments efficiently using Cisco Catalyst Center's centralized management and automation capabilities.
+ FIREPOWER_MANAGEMENT_SYSTEM - It is a centralized management console used to manage Cisco's Firepower Next-Generation Firewall (NGFW) devices.
+ It provides features such as policy management, threat detection, and advanced security analytics.
+ type: str
+ default: "NETWORK_DEVICE"
+ cli_transport:
+ description: The essential prerequisite for adding Network devices is the specification of the transport
+ protocol (either SSH or Telnet) used by the device.
+ type: str
+ compute_device:
+ description: Indicates whether a device is a compute device.
+ type: bool
+ password:
+ description: Password for accessing the device and for file encryption during device export. Required for
+ adding Network Device. Also needed for file encryption while exporting device in a csv file.
+ type: str
+ enable_password:
+ description: Password required for enabling configurations on the device.
+ type: str
+ extended_discovery_info:
+ description: Additional discovery information for the device.
+ type: str
+ http_password:
+ description: HTTP password required for adding compute, Meraki, and Firepower Management Devices.
+ type: str
+ http_port:
+ description: HTTP port number required for adding compute and Firepower Management Devices.
+ type: str
+ http_secure:
+ description: Flag indicating HTTP security.
+ type: bool
+ http_username:
+ description: HTTP username required for adding compute and Firepower Management Devices.
+ type: str
+ ip_address_list:
+ description: A list of the IP addresses for the devices. It is required for tasks such as adding, updating, deleting,
+ or resyncing devices, with Meraki devices being the exception.
+ elements: str
+ type: list
+ hostname_list:
+ description: "A list of hostnames representing devices. Operations such as updating, deleting, resyncing, or rebooting
+ can be performed as alternatives to using IP addresses."
+ type: list
+ elements: str
+ serial_number_list:
+ description: A list of serial numbers representing devices. Operations such as updating, deleting, resyncing, or rebooting
+ can be performed as alternatives to using IP addresses.
+ type: list
+ elements: str
+ mac_address_list:
+ description: "A list of MAC addresses representing devices. Operations such as updating, deleting, resyncing, or rebooting
+ can be performed as alternatives to using IP addresses."
+ type: list
+ elements: str
+ netconf_port:
+ description: Specifies the port number for connecting to devices using the Netconf protocol. Netconf (Network Configuration Protocol)
+ is used for managing network devices. Ensure that the provided port number corresponds to the Netconf service port configured
+ on your network devices.
+ NETCONF with user privilege 15 is mandatory for enabling Wireless Services on Wireless capable devices such as Catalyst 9000 series
+ Switches and C9800 Series Wireless Controllers. The NETCONF credentials are required to connect to C9800 Series Wireless Controllers
+ as the majority of data collection is done using NETCONF for these Devices.
+ type: str
+ username:
+ description: Username for accessing the device. Required for Adding Network Device.
+ type: str
+ snmp_auth_passphrase:
+ description: SNMP authentication passphrase required for adding network, compute, and third-party devices.
+ type: str
+ snmp_auth_protocol:
+ description: SNMP authentication protocol.
+ SHA (Secure Hash Algorithm) - cryptographic hash function commonly used for data integrity verification and authentication purposes.
+ type: str
+ default: "SHA"
+ snmp_mode:
+ description: Device's snmp Mode refer to different SNMP (Simple Network Management Protocol) versions and their corresponding security levels.
+ NOAUTHNOPRIV - This mode provides no authentication or encryption for SNMP messages. It means that devices communicating using SNMPv1 do
+ not require any authentication (username/password) or encryption (data confidentiality). This makes it the least secure option.
+ AUTHNOPRIV - This mode provides authentication but no encryption for SNMP messages. Authentication involves validating the source of the
+ SNMP messages using a community string (similar to a password). However, the data transmitted between devices is not encrypted,
+ so it's susceptible to eavesdropping.
+ AUTHPRIV - This mode provides both authentication and encryption for SNMP messages. It offers the highest level of security among the three
+ options. Authentication ensures that the source of the messages is genuine, and encryption ensures that the data exchanged between
+ devices is confidential and cannot be intercepted by unauthorized parties.
+ type: str
+ snmp_priv_passphrase:
+ description: SNMP private passphrase required for adding network, compute, and third-party devices.
+ type: str
+ snmp_priv_protocol:
+ description: SNMP private protocol required for adding network, compute, and third-party devices.
+ type: str
+ snmp_ro_community:
+ description: SNMP Read-Only community required for adding V2C devices.
+ type: str
+ snmp_rw_community:
+ description: SNMP Read-Write community required for adding V2C devices.
+ type: str
+ snmp_retry:
+ description: SNMP retry count.
+ type: int
+ default: 3
+ snmp_timeout:
+ description: SNMP timeout duration.
+ type: int
+ default: 5
+ snmp_username:
+ description: SNMP username required for adding network, compute, and third-party devices.
+ type: str
+ snmp_version:
+ description: It is a standard protocol used for managing and monitoring network devices.
+ v2 - In this communication between the SNMP manager (such as Cisco Catalyst) and the managed devices
+      (such as routers, switches, or access points) is based on community strings. Community strings serve
+      as a form of authentication and they are transmitted in clear text, providing no encryption.
+ v3 - It is the most secure version of SNMP, providing authentication, integrity, and encryption features.
+ It allows for the use of usernames, authentication passwords, and encryption keys, providing stronger
+ security compared to v2.
+ type: str
+ update_mgmt_ipaddresslist:
+ description: List of updated management IP addresses for network devices.
+ type: list
+ elements: dict
+ suboptions:
+ exist_mgmt_ipaddress:
+ description: Device's existing Mgmt IpAddress.
+ type: str
+ new_mgmt_ipaddress:
+ description: Device's new Mgmt IpAddress.
+ type: str
+ force_sync:
+ description: If forcesync is true then device sync would run in high priority thread if available, else the sync will fail.
+ type: bool
+ default: False
+ device_resync:
+ description: Make this as true needed for the resyncing of device.
+ type: bool
+ default: False
+ reboot_device:
+ description: Make this as true needed for the Rebooting of Access Points.
+ type: bool
+ default: False
+ credential_update:
+        description: Set this to true to update the device credentials and other device details.
+ type: bool
+ default: False
+ clean_config:
+ description: Required if need to delete the Provisioned device by clearing current configuration.
+ type: bool
+ default: False
+ role:
+ description: Role of device which can be ACCESS, CORE, DISTRIBUTION, BORDER ROUTER, UNKNOWN.
+ ALL - This role typically represents all devices within the network, regardless of their specific roles or functions.
+        UNKNOWN - This role is assigned to devices whose roles or functions have not been identified or classified within Cisco Catalyst Center.
+ This could happen if the platform is unable to determine the device's role based on available information.
+ ACCESS - This role typically represents switches or access points that serve as access points for end-user devices to connect to the network.
+ These devices are often located at the edge of the network and provide connectivity to end-user devices.
+ BORDER ROUTER - These are devices that connect different network domains or segments together. They often serve as
+ gateways between different networks, such as connecting an enterprise network to the internet or connecting
+ multiple branch offices.
+ DISTRIBUTION - This role represents function as distribution switches or routers in hierarchical network designs. They aggregate traffic
+ from access switches and route it toward the core of the network or toward other distribution switches.
+ CORE - This role typically represents high-capacity switches or routers that form the backbone of the network. They handle large volumes
+ of traffic and provide connectivity between different parts of network, such as connecting distribution switches or
+ providing interconnection between different network segments.
+ type: str
+ add_user_defined_field:
+ description: This operation will take dictionary as a parameter and in this we give details to
+ create/update/delete/assign multiple UDF to a device.
+ type: dict
+ suboptions:
+ name:
+ description: Name of Global User Defined Field. Required for creating/deleting UDF and then assigning it to device.
+ type: str
+ description:
+ description: Info about the global user defined field. Also used while updating interface details.
+ type: str
+ value:
+ description: Value to assign to tag with or without the same user defined field name.
+ type: str
+ update_interface_details:
+ description: This operation will take dictionary as a parameter and in this we give details to update interface details of device.
+ type: dict
+ suboptions:
+ description:
+ description: Specifies the description of the interface of the device.
+ type: str
+ interface_name:
+ description: Specify the list of interface names to update the details of the device interface.
+ (For example, GigabitEthernet1/0/11, FortyGigabitEthernet1/1/2)
+ type: list
+ elements: str
+ vlan_id:
+ description: Unique Id number assigned to a VLAN within a network used only while updating interface details.
+ type: int
+ voice_vlan_id:
+ description: Identifier used to distinguish a specific VLAN that is dedicated to voice traffic used only while updating interface details.
+ type: int
+ deployment_mode:
+ description: Preview/Deploy [Preview means the configuration is not pushed to the device. Deploy makes the configuration pushed to the device]
+ type: str
+ default: "Deploy"
+ clear_mac_address_table:
+ description: Set this to true if you need to clear the MAC address table for a specific device's interface. It's a boolean type,
+ with a default value of False.
+ type: bool
+ default: False
+ admin_status:
+ description: Status of Interface of a device, it can be (UP/DOWN).
+ type: str
+ export_device_list:
+ description: This operation take dictionary as parameter and export the device details as well as device credentials
+ details in a csv file.
+ type: dict
+ suboptions:
+ password:
+            description: Specifies the password for the encryption of file while exporting the device credentials into the file.
+ type: str
+ site_name:
+ description: Indicates the exact location where the wired device will be provisioned. This is a string value that should
+ represent the complete hierarchical path of the site (For example, "Global/USA/San Francisco/BGL_18/floor_pnp").
+ type: str
+ operation_enum:
+ description: enum(CREDENTIALDETAILS, DEVICEDETAILS) 0 to export Device Credential Details Or 1 to export Device Details.
+            CREDENTIALDETAILS - Used for exporting device credentials details like snmp credentials, device credentials etc.
+ DEVICEDETAILS - Used for exporting device specific details like device hostname, serial number, type, family etc.
+ type: str
+ parameters:
+ description: List of device parameters that needs to be exported to file.(For example, ["componentName", "SerialNumber", "Last Sync Status"])
+ type: list
+ elements: str
+ provision_wired_device:
+ description: This parameter takes a list of dictionaries. Each dictionary provides the IP address of a wired device and
+ the name of the site where the device will be provisioned.
+ type: list
+ elements: dict
+ suboptions:
+ device_ip:
+ description: Specifies the IP address of the wired device. This is a string value that should be in the format of
+ standard IPv4 or IPv6 addresses.
+ type: str
+ version_added: 6.12.0
+ site_name:
+ description: Indicates the exact location where the wired device will be provisioned. This is a string value that should
+ represent the complete hierarchical path of the site (For example, "Global/USA/San Francisco/BGL_18/floor_pnp").
+ type: str
+ resync_retry_count:
+ description: Determines the total number of retry attempts for checking if the device has reached a managed state during
+ the provisioning process. If unspecified, the default value is set to 200 retries.
+ type: int
+ default: 200
+ version_added: 6.12.0
+ resync_retry_interval:
+ description: Sets the interval, in seconds, at which the system will recheck the device status throughout the provisioning
+ process. If unspecified, the system will check the device status every 2 seconds by default.
+ type: int
+ default: 2
+ version_added: 6.12.0
+
+requirements:
+- dnacentersdk >= 2.5.5
+- python >= 3.5
+seealso:
+- name: Cisco Catalyst Center documentation for Devices AddDevice2
+ description: Complete reference of the AddDevice2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-device
+- name: Cisco Catalyst Center documentation for Devices DeleteDeviceById
+ description: Complete reference of the DeleteDeviceById API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-device-by-id
+- name: Cisco Catalyst Center documentation for Devices SyncDevices2
+ description: Complete reference of the SyncDevices2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!sync-devices
+notes:
+ - SDK Method used are
+ devices.Devices.add_device,
+ devices.Devices.delete_device_by_id,
+ devices.Devices.sync_devices,
+
+ - Paths used are
+ post /dna/intent/api/v1/network-device,
+ delete /dna/intent/api/v1/network-device/{id},
+ put /dna/intent/api/v1/network-device,
+
+ - Removed 'managementIpAddress' options in v4.3.0.
+ - Renamed argument 'ip_address' to 'ip_address_list' option in v6.12.0.
+ - Removed 'serial_number', 'device_added', 'role_source', options in v6.12.0.
+ - Added 'add_user_defined_field', 'update_interface_details', 'export_device_list' options in v6.13.1.
+ - Removed 'provision_wireless_device', 'reprovision_wired_device' options in v6.13.1.
+ - Added the parameter 'admin_status' options in v6.13.1.
+ - Removed 'device_updated' options in v6.13.1.
+
+"""
+
+EXAMPLES = r"""
+- name: Add new device in Inventory with full credentials
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - cli_transport: ssh
+ compute_device: False
+ password: Test@123
+ enable_password: Test@1234
+ extended_discovery_info: test
+ http_username: "testuser"
+ http_password: "test"
+ http_port: "443"
+ http_secure: False
+ ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ netconf_port: 830
+ snmp_auth_passphrase: "Lablab@12"
+ snmp_auth_protocol: SHA
+ snmp_mode: AUTHPRIV
+ snmp_priv_passphrase: "Lablab@123"
+ snmp_priv_protocol: AES256
+ snmp_retry: 3
+ snmp_timeout: 5
+ snmp_username: v3Public
+ snmp_version: v3
+ type: NETWORK_DEVICE
+ username: cisco
+
+- name: Add new Compute device in Inventory with full credentials.Inputs needed for Compute Device
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ http_username: "testuser"
+ http_password: "test"
+ http_port: "443"
+ snmp_auth_passphrase: "Lablab@12"
+ snmp_auth_protocol: SHA
+ snmp_mode: AUTHPRIV
+ snmp_priv_passphrase: "Lablab@123"
+ snmp_priv_protocol: AES256
+ snmp_retry: 3
+ snmp_timeout: 5
+ snmp_username: v3Public
+ compute_device: True
+ username: cisco
+ type: "COMPUTE_DEVICE"
+
+- name: Add new Meraki device in Inventory with full credentials.Inputs needed for Meraki Device.
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - http_password: "test"
+ type: "MERAKI_DASHBOARD"
+
+- name: Add new Firepower Management device in Inventory with full credentials.Input needed to add Device.
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ http_username: "testuser"
+ http_password: "test"
+ http_port: "443"
+ type: "FIREPOWER_MANAGEMENT_SYSTEM"
+
+- name: Add new Third Party device in Inventory with full credentials.Input needed to add Device.
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ snmp_auth_passphrase: "Lablab@12"
+ snmp_auth_protocol: SHA
+ snmp_mode: AUTHPRIV
+ snmp_priv_passphrase: "Lablab@123"
+ snmp_priv_protocol: AES256
+ snmp_retry: 3
+ snmp_timeout: 5
+ snmp_username: v3Public
+ type: "THIRD_PARTY_DEVICE"
+
+- name: Update device details or credentials in Inventory
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - cli_transport: telnet
+ compute_device: False
+ password: newtest123
+ enable_password: newtest1233
+ ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ type: NETWORK_DEVICE
+ credential_update: True
+
+- name: Update new management IP address of device in inventory
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1"]
+ credential_update: True
+ update_mgmt_ipaddresslist:
+ - exist_mgmt_ipaddress: "1.1.1.1"
+ new_mgmt_ipaddress: "12.12.12.12"
+
+- name: Associate Wired Devices to site and Provisioned it in Inventory
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - provision_wired_device:
+ - device_ip: "1.1.1.1"
+ site_name: "Global/USA/San Francisco/BGL_18/floor_pnp"
+ resync_retry_count: 200
+          resync_retry_interval: 2
+ - device_ip: "2.2.2.2"
+ site_name: "Global/USA/San Francisco/BGL_18/floor_test"
+ resync_retry_count: 200
+ resync_retry_interval: 2
+
+- name: Update Device Role with IP Address
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ role: ACCESS
+
+- name: Update Interface details with IP Address
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ update_interface_details:
+ description: "Testing for updating interface details"
+ admin_status: "UP"
+ vlan_id: 23
+ voice_vlan_id: 45
+ deployment_mode: "Deploy"
+ interface_name: ["GigabitEthernet1/0/11", FortyGigabitEthernet1/1/1]
+ clear_mac_address_table: True
+
+- name: Export Device Details in a CSV file Interface details with IP Address
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ export_device_list:
+ password: "File_password"
+ operation_enum: "0"
+ parameters: ["componentName", "SerialNumber", "Last Sync Status"]
+
+- name: Create Global User Defined with IP Address
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ add_user_defined_field:
+ - name: Test123
+ description: "Added first udf for testing"
+ value: "value123"
+ - name: Test321
+ description: "Added second udf for testing"
+ value: "value321"
+
+- name: Resync Device with IP Addresses
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ device_resync: True
+ force_sync: False
+
+- name: Reboot AP Devices with IP Addresses
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ reboot_device: True
+
+- name: Delete Provision/Unprovision Devices by IP Address
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: False
+ dnac_log_level: "{{dnac_log_level}}"
+ state: deleted
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ clean_config: False
+
+- name: Delete Global User Defined Field with name
+ cisco.dnac.inventory_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: deleted
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ add_user_defined_field:
+ - name: Test123
+ - name: Test321
+
+"""
+
+RETURN = r"""
+
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
+# common approach when a module relies on optional dependencies that are not available during the validation process.
+try:
+ import pyzipper
+ HAS_PYZIPPER = True
+except ImportError:
+ HAS_PYZIPPER = False
+ pyzipper = None
+
+import csv
+import time
+from datetime import datetime
+from io import BytesIO, StringIO
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts,
+)
+# Defer this feature as API issue is there once it's fixed we will addresses it in upcoming release iac2.0
+support_for_provisioning_wireless = False
+
+
+class DnacDevice(DnacBase):
+ """Class containing member attributes for Inventory intent module"""
+
+    def __init__(self, module):
+        """
+        Initialize the inventory intent handler.
+        Parameters:
+            module (AnsibleModule): The Ansible module instance carrying the
+                playbook parameters; forwarded to the DnacBase constructor.
+        """
+        super().__init__(module)
+        # Only 'merged' (add/update) and 'deleted' states are implemented by this module.
+        self.supported_states = ["merged", "deleted"]
+
+    def validate_input(self):
+        """
+        Validate the fields provided in the playbook.
+        Checks the configuration provided in the playbook against a predefined specification
+        to ensure it adheres to the expected structure and data types.
+        Parameters:
+            self: The instance of the class containing the 'config' attribute to be validated.
+        Returns:
+            The method returns an instance of the class with updated attributes:
+                - self.msg: A message describing the validation result.
+                - self.status: The status of the validation (either 'success' or 'failed').
+                - self.validated_config: If successful, a validated version of the 'config' parameter.
+        Example:
+            To use this method, create an instance of the class and call 'validate_input' on it.
+            If the validation succeeds, 'self.status' will be 'success' and 'self.validated_config'
+            will contain the validated configuration. If it fails, 'self.status' will be 'failed', and
+            'self.msg' will describe the validation issues.
+        """
+
+        # Per-entry schema for the 'config' list; mirrors the options declared in
+        # DOCUMENTATION. Nested suboption specs are flattened alongside 'type' —
+        # presumably validate_list_of_dicts understands this shape; TODO confirm.
+        temp_spec = {
+            'cli_transport': {'type': 'str'},
+            'compute_device': {'type': 'bool'},
+            'enable_password': {'type': 'str'},
+            'extended_discovery_info': {'type': 'str'},
+            'http_password': {'type': 'str'},
+            'http_port': {'type': 'str'},
+            'http_secure': {'type': 'bool'},
+            'http_username': {'type': 'str'},
+            'ip_address_list': {'type': 'list', 'elements': 'str'},
+            'hostname_list': {'type': 'list', 'elements': 'str'},
+            'serial_number_list': {'type': 'list', 'elements': 'str'},
+            'mac_address_list': {'type': 'list', 'elements': 'str'},
+            'netconf_port': {'type': 'str'},
+            'password': {'type': 'str'},
+            'snmp_auth_passphrase': {'type': 'str'},
+            'snmp_auth_protocol': {'default': "SHA", 'type': 'str'},
+            'snmp_mode': {'type': 'str'},
+            'snmp_priv_passphrase': {'type': 'str'},
+            'snmp_priv_protocol': {'type': 'str'},
+            'snmp_ro_community': {'type': 'str'},
+            'snmp_rw_community': {'type': 'str'},
+            'snmp_retry': {'default': 3, 'type': 'int'},
+            'snmp_timeout': {'default': 5, 'type': 'int'},
+            'snmp_username': {'type': 'str'},
+            'snmp_version': {'type': 'str'},
+            'update_mgmt_ipaddresslist': {'type': 'list', 'elements': 'dict'},
+            'username': {'type': 'str'},
+            'role': {'type': 'str'},
+            'device_resync': {'type': 'bool'},
+            'reboot_device': {'type': 'bool'},
+            'credential_update': {'type': 'bool'},
+            'force_sync': {'type': 'bool'},
+            'clean_config': {'type': 'bool'},
+            # NOTE(review): DOCUMENTATION declares 'add_user_defined_field' as a dict,
+            # but this spec (and the EXAMPLES section) treat it as a list — confirm
+            # which shape is intended.
+            'add_user_defined_field': {
+                'type': 'list',
+                'name': {'type': 'str'},
+                'description': {'type': 'str'},
+                'value': {'type': 'str'},
+            },
+            'update_interface_details': {
+                'type': 'dict',
+                'description': {'type': 'str'},
+                'vlan_id': {'type': 'int'},
+                'voice_vlan_id': {'type': 'int'},
+                'interface_name': {'type': 'list', 'elements': 'str'},
+                'deployment_mode': {'default': 'Deploy', 'type': 'str'},
+                'clear_mac_address_table': {'default': False, 'type': 'bool'},
+                'admin_status': {'type': 'str'},
+            },
+            'export_device_list': {
+                'type': 'dict',
+                'password': {'type': 'str'},
+                'operation_enum': {'type': 'str'},
+                'parameters': {'type': 'list', 'elements': 'str'},
+            },
+            'provision_wired_device': {
+                'type': 'list',
+                'device_ip': {'type': 'str'},
+                'site_name': {'type': 'str'},
+                'resync_retry_count': {'default': 200, 'type': 'int'},
+                'resync_retry_interval': {'default': 2, 'type': 'int'},
+            }
+        }
+
+        # Validate device params
+        valid_temp, invalid_params = validate_list_of_dicts(
+            self.config, temp_spec
+        )
+
+        # Any entry that failed validation aborts the run with a failed status.
+        if invalid_params:
+            self.msg = "Invalid parameters in playbook: {0}".format(invalid_params)
+            self.log(self.msg, "ERROR")
+            self.status = "failed"
+            return self
+
+        # Validation succeeded: stash the normalized config for later processing.
+        self.validated_config = valid_temp
+        self.msg = "Successfully validated playbook configuration parameters using 'validate_input': {0}".format(str(valid_temp))
+        self.log(self.msg, "INFO")
+        self.status = "success"
+
+        return self
+
+    def get_device_ips_from_config_priority(self):
+        """
+        Retrieve device IPs based on the configuration.
+        Parameters:
+            - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+        Returns:
+            list: A list containing device IPs.
+        Description:
+            This method retrieves device IPs based on the priority order specified in the configuration.
+            It first checks if device IPs are available. If not, it checks hostnames, serial numbers,
+            and MAC addresses in order and retrieves IPs based on availability.
+            If none of the information is available, an empty list is returned.
+        """
+        # Retrieve device IPs from the configuration.
+        # NOTE: only the first entry of the config list is consulted here.
+        device_ips = self.config[0].get("ip_address_list")
+
+        if device_ips:
+            return device_ips
+
+        # If device IPs are not available, check hostnames.
+        # The get_device_ips_from_* helpers are sibling methods of this class
+        # (not visible in this hunk) that map identifiers to management IPs.
+        device_hostnames = self.config[0].get("hostname_list")
+        if device_hostnames:
+            return self.get_device_ips_from_hostname(device_hostnames)
+
+        # If hostnames are not available, check serial numbers
+        device_serial_numbers = self.config[0].get("serial_number_list")
+        if device_serial_numbers:
+            return self.get_device_ips_from_serial_number(device_serial_numbers)
+
+        # If serial numbers are not available, check MAC addresses
+        device_mac_addresses = self.config[0].get("mac_address_list")
+        if device_mac_addresses:
+            return self.get_device_ips_from_mac_address(device_mac_addresses)
+
+        # If no information is available, return an empty list
+        return []
+
+ def device_exists_in_dnac(self):
+ """
+ Check which devices already exists in Cisco Catalyst Center and return both device_exist and device_not_exist in dnac.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Cisco Catalyst Center.
+ Returns:
+ list: A list of devices that exist in Cisco Catalyst Center.
+ Description:
+ Queries Cisco Catalyst Center to check which devices are already present in Cisco Catalyst Center and store
+ its management IP address in the list of devices that exist.
+ Example:
+ To use this method, create an instance of the class and call 'device_exists_in_dnac' on it,
+ The method returns a list of management IP addressesfor devices that exist in Cisco Catalyst Center.
+ """
+
+ device_in_dnac = []
+
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ )
+
+ except Exception as e:
+ error_message = "Error while fetching device from Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "CRITICAL")
+ raise Exception(error_message)
+
+ if response:
+ self.log("Received API response from 'get_device_list': {0}".format(str(response)), "DEBUG")
+ response = response.get("response")
+ for ip in response:
+ device_ip = ip["managementIpAddress"]
+ device_in_dnac.append(device_ip)
+
+ return device_in_dnac
+
+ def is_udf_exist(self, field_name):
+ """
+ Check if a Global User Defined Field exists in Cisco Catalyst Center based on its name.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ field_name (str): The name of the Global User Defined Field.
+ Returns:
+ bool: True if the Global User Defined Field exists, False otherwise.
+ Description:
+ The function sends a request to Cisco Catalyst Center to retrieve all Global User Defined Fields
+ with the specified name. If matching field is found, the function returns True, indicating that
+ the field exists else returns False.
+ """
+
+ response = self.dnac._exec(
+ family="devices",
+ function='get_all_user_defined_fields',
+ params={"name": field_name},
+ )
+
+ self.log("Received API response from 'get_all_user_defined_fields': {0}".format(str(response)), "DEBUG")
+ udf = response.get("response")
+
+ if (len(udf) == 1):
+ return True
+
+ message = "Global User Defined Field with name '{0}' doesnot exist in Cisco Catalyst Center".format(field_name)
+ self.log(message, "INFO")
+
+ return False
+
+ def create_user_defined_field(self, udf):
+ """
+ Create a Global User Defined Field in Cisco Catalyst Center based on the provided configuration.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ udf (dict): A dictionary having the payload for the creation of user defined field(UDF) in Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ The function retrieves the configuration for adding a user-defined field from the configuration object,
+ sends the request to Cisco Catalyst Center to create the field, and logs the response.
+ """
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='create_user_defined_field',
+ params=udf,
+ )
+ self.log("Received API response from 'create_user_defined_field': {0}".format(str(response)), "DEBUG")
+ response = response.get("response")
+ field_name = udf.get('name')
+ self.log("Global User Defined Field with name '{0}' created successfully".format(field_name), "INFO")
+ self.status = "success"
+
+ except Exception as e:
+ error_message = "Error while creating Global UDF(User Defined Field) in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "ERROR")
+
+ return self
+
+ def add_field_to_devices(self, device_ids, udf):
+ """
+ Add a Global user-defined field with specified details to a list of devices in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ device_ids (list): A list of device IDs to which the user-defined field will be added.
+ udf (dict): A dictionary having the user defined field details including name and value.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ The function retrieves the details of the user-defined field from the configuration object,
+ including the field name and default value then iterates over list of device IDs, creating a payload for
+ each device and sending the request to Cisco Catalyst Center to add the user-defined field.
+ """
+ # field_details = self.config[0].get('add_user_defined_field')
+ field_name = udf.get('name')
+ field_value = udf.get('value', '1')
+ for device_id in device_ids:
+ payload = {}
+ payload['name'] = field_name
+ payload['value'] = field_value
+ udf_param_dict = {
+ 'payload': [payload],
+ 'device_id': device_id
+ }
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='add_user_defined_field_to_device',
+ params=udf_param_dict,
+ )
+ self.log("Received API response from 'add_user_defined_field_to_device': {0}".format(str(response)), "DEBUG")
+ response = response.get("response")
+ self.status = "success"
+ self.result['changed'] = True
+
+ except Exception as e:
+ self.status = "failed"
+ error_message = "Error while adding Global UDF to device in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "ERROR")
+ self.result['changed'] = False
+
+ return self
+
    def trigger_export_api(self, payload_params):
        """
        Triggers the export API to generate a CSV file containing device details based on the given payload parameters.
        Parameters:
            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
            payload_params (dict): A dictionary containing parameters required for the export API.
        Returns:
            dict: The response from the export API, including information about the task and file ID.
                If the export is successful, the CSV file can be downloaded using the file ID.
        Description:
            The function initiates the export API in Cisco Catalyst Center to generate a CSV file containing detailed information
            about devices.The response from the API includes task details and a file ID.
        """

        response = self.dnac._exec(
            family="devices",
            function='export_device_list',
            op_modifies=True,
            params=payload_params,
        )
        self.log("Received API response from 'export_device_list': {0}".format(str(response)), "DEBUG")
        response = response.get("response")
        task_id = response.get("taskId")

        # Poll the export task until it either exposes the generated file
        # (signalled by 'additionalStatusURL') or reports an error.
        # NOTE(review): there is no retry limit or sleep here - if the task
        # never reaches a terminal state this loop spins forever; confirm the
        # task framework guarantees termination.
        while True:
            execution_details = self.get_task_details(task_id)

            if execution_details.get("additionalStatusURL"):
                # The file ID is the last path segment of the status URL.
                file_id = execution_details.get("additionalStatusURL").split("/")[-1]
                break
            elif execution_details.get("isError"):
                self.status = "failed"
                failure_reason = execution_details.get("failureReason")
                if failure_reason:
                    self.msg = "Could not get the File ID because of {0} so can't export device details in csv file".format(failure_reason)
                else:
                    self.msg = "Could not get the File ID so can't export device details in csv file"
                self.log(self.msg, "ERROR")

                # On error, return the original export response so the caller
                # can inspect it; self.status already reflects the failure.
                return response

        # With this File ID call the Download File by FileID API and process the response
        response = self.dnac._exec(
            family="file",
            function='download_a_file_by_fileid',
            op_modifies=True,
            params={"file_id": file_id},
        )
        self.log("Received API response from 'download_a_file_by_fileid': {0}".format(str(response)), "DEBUG")

        return response
+
+ def decrypt_and_read_csv(self, response, password):
+ """
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ response (requests.Response): HTTP response object containing the encrypted CSV file.
+ password (str): Password used for decrypting the CSV file.
+ Returns:
+ csv.DictReader: A CSV reader object for the decrypted content, allowing iteration over rows as dictionaries.
+ Description:
+ Decrypts and reads a CSV-like file from the given HTTP response using the provided password.
+ """
+
+ zip_data = BytesIO(response.data)
+
+ if not HAS_PYZIPPER:
+ self.msg = "pyzipper is required for this module. Install pyzipper to use this functionality."
+ self.log(self.msg, "CRITICAL")
+ self.status = "failed"
+ return self
+
+ snmp_protocol = self.config[0].get('snmp_priv_protocol', 'AES128')
+ encryption_dict = {
+ 'AES128': 'pyzipper.WZ_AES128',
+ 'AES192': 'pyzipper.WZ_AES192',
+ 'AES256': 'pyzipper.WZ_AES',
+ 'CISCOAES128': 'pyzipper.WZ_AES128',
+ 'CISCOAES192': 'pyzipper.WZ_AES192',
+ 'CISCOAES256': 'pyzipper.WZ_AES'
+ }
+ try:
+ encryption_method = encryption_dict.get(snmp_protocol)
+ except Exception as e:
+ self.log("Given SNMP protcol '{0}' not present".format(snmp_protocol), "WARNING")
+
+ if not encryption_method:
+ self.msg = "Invalid SNMP protocol '{0}' specified for encryption.".format(snmp_protocol)
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ # Create a PyZipper object with the password
+ with pyzipper.AESZipFile(zip_data, 'r', compression=pyzipper.ZIP_LZMA, encryption=encryption_method) as zip_ref:
+ # Assuming there is a single file in the zip archive
+ file_name = zip_ref.namelist()[0]
+
+ # Extract the content of the file with the provided password
+ file_content_binary = zip_ref.read(file_name, pwd=password.encode('utf-8'))
+
+ # Now 'file_content_binary' contains the binary content of the decrypted file
+ # Since the content is text, so we can decode it
+ file_content_text = file_content_binary.decode('utf-8')
+
+ # Now 'file_content_text' contains the text content of the decrypted file
+ self.log("Text content of decrypted file: {0}".format(file_content_text), "DEBUG")
+
+ # Parse the CSV-like string into a list of dictionaries
+ csv_reader = csv.DictReader(StringIO(file_content_text))
+
+ return csv_reader
+
+ def export_device_details(self):
+ """
+ Export device details from Cisco Catalyst Center into a CSV file.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of the class with updated result, status, and log.
+ Description:
+ This function exports device details from Cisco Catalyst Center based on the provided IP addresses in the configuration.
+ It retrieves the device UUIDs, calls the export device list API, and downloads the exported data of both device details and
+ and device credentials with an encrtypted zip file with password into CSV format.
+ The CSV data is then parsed and written to a file.
+ """
+
+ device_ips = self.get_device_ips_from_config_priority()
+
+ if not device_ips:
+ self.status = "failed"
+ self.msg = "Cannot export device details as no devices are specified in the playbook"
+ self.log(self.msg, "ERROR")
+ return self
+
+ try:
+ device_uuids = self.get_device_ids(device_ips)
+
+ if not device_uuids:
+ self.status = "failed"
+ self.result['changed'] = False
+ self.msg = "Could not find device UUIDs for exporting device details"
+ self.log(self.msg, "ERROR")
+ return self
+
+ # Now all device UUID get collected so call the export device list API
+ export_device_list = self.config[0].get('export_device_list')
+ password = export_device_list.get("password")
+
+ if not self.is_valid_password(password):
+ self.status = "failed"
+ detailed_msg = """Invalid password. Min password length is 8 and it should contain atleast one lower case letter,
+ one uppercase letter, one digit and one special characters from -=\\;,./~!@#$%^&*()_+{}[]|:?"""
+ formatted_msg = ' '.join(line.strip() for line in detailed_msg.splitlines())
+ self.msg = formatted_msg
+ self.log(formatted_msg, "INFO")
+ return self
+
+ payload_params = {
+ "deviceUuids": device_uuids,
+ "password": password,
+ "operationEnum": export_device_list.get("operation_enum", "0"),
+ "parameters": export_device_list.get("parameters")
+ }
+
+ response = self.trigger_export_api(payload_params)
+ self.check_return_status()
+
+ if payload_params["operationEnum"] == "0":
+ temp_file_name = response.filename
+ output_file_name = temp_file_name.split(".")[0] + ".csv"
+ csv_reader = self.decrypt_and_read_csv(response, password)
+ self.check_return_status()
+ else:
+ decoded_resp = response.data.decode(encoding='utf-8')
+ self.log("Decoded response of Export Device Credential file: {0}".format(str(decoded_resp)), "DEBUG")
+
+ # Parse the CSV-like string into a list of dictionaries
+ csv_reader = csv.DictReader(StringIO(decoded_resp))
+ current_date = datetime.now()
+ formatted_date = current_date.strftime("%m-%d-%Y")
+ output_file_name = "devices-" + str(formatted_date) + ".csv"
+
+ device_data = []
+ for row in csv_reader:
+ device_data.append(row)
+
+ # Write the data to a CSV file
+ with open(output_file_name, 'w', newline='') as csv_file:
+ fieldnames = device_data[0].keys()
+ csv_writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
+ csv_writer.writeheader()
+ csv_writer.writerows(device_data)
+
+ self.msg = "Device Details Exported Successfully to the CSV file: {0}".format(output_file_name)
+ self.log(self.msg, "INFO")
+ self.status = "success"
+ self.result['changed'] = True
+ self.result['response'] = self.msg
+
+ except Exception as e:
+ self.msg = "Error while exporting device details into CSV file for device(s): '{0}'".format(str(device_ips))
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+
+ return self
+
+ def get_ap_devices(self, device_ips):
+ """
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ device_ip (str): The management IP address of the device for which the response is to be retrieved.
+ Returns:
+ list: A list containing Access Point device IP's obtained from the Cisco Catalyst Center.
+ Description:
+ This method communicates with Cisco Catalyst Center to retrieve the details of a device with the specified
+ management IP address and check if device family matched to Unified AP. It executes the 'get_device_list'
+ API call with the provided device IP address, logs the response, and returns list containing ap device ips.
+ """
+
+ ap_device_list = []
+ for device_ip in device_ips:
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params={"managementIpAddress": device_ip}
+ )
+ response = response.get('response', [])
+
+ if response and response[0].get('family', '') == "Unified AP":
+ ap_device_list.append(device_ip)
+ except Exception as e:
+ error_message = "Error while getting the response of device from Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "CRITICAL")
+ raise Exception(error_message)
+
+ return ap_device_list
+
+ def resync_devices(self):
+ """
+ Resync devices in Cisco Catalyst Center.
+ This function performs the Resync operation for the devices specified in the playbook.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ The function expects the following parameters in the configuration:
+ - "ip_address_list": List of device IP addresses to be resynced.
+ - "force_sync": (Optional) Whether to force sync the devices. Defaults to "False".
+ """
+
+ # Code for triggers the resync operation using the retrieved device IDs and force sync parameter.
+ device_ips = self.get_device_ips_from_config_priority()
+ input_device_ips = device_ips.copy()
+ device_in_dnac = self.device_exists_in_dnac()
+
+ for device_ip in input_device_ips:
+ if device_ip not in device_in_dnac:
+ input_device_ips.remove(device_ip)
+
+ ap_devices = self.get_ap_devices(input_device_ips)
+ self.log("AP Devices from the playbook input are: {0}".format(str(ap_devices)), "INFO")
+
+ if ap_devices:
+ for ap_ip in ap_devices:
+ input_device_ips.remove(ap_ip)
+ self.log("Following devices {0} are AP, so can't perform resync operation.".format(str(ap_devices)), "WARNING")
+
+ if not input_device_ips:
+ self.msg = "Cannot perform the Resync operation as the device(s) with IP(s) {0} are not present in Cisco Catalyst Center".format(str(device_ips))
+ self.status = "success"
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ self.log(self.msg, "WARNING")
+ return self
+
+ device_ids = self.get_device_ids(input_device_ips)
+ try:
+ force_sync = self.config[0].get("force_sync", False)
+ resync_param_dict = {
+ 'payload': device_ids,
+ 'force_sync': force_sync
+ }
+ response = self.dnac._exec(
+ family="devices",
+ function='sync_devices_using_forcesync',
+ op_modifies=True,
+ params=resync_param_dict,
+ )
+ self.log("Received API response from 'sync_devices_using_forcesync': {0}".format(str(response)), "DEBUG")
+
+ if response and isinstance(response, dict):
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+
+ if 'Synced' in execution_details.get("progress"):
+ self.status = "success"
+ self.result['changed'] = True
+ self.result['response'] = execution_details
+ self.msg = "Devices have been successfully resynced. Devices resynced: {0}".format(str(input_device_ips))
+ self.log(self.msg, "INFO")
+ break
+ elif execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "Device resynced get failed because of {0}".format(failure_reason)
+ else:
+ self.msg = "Device resynced get failed."
+ self.log(self.msg, "ERROR")
+ break
+
+ except Exception as e:
+ self.status = "failed"
+ error_message = "Error while resyncing device in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "ERROR")
+
+ return self
+
+ def reboot_access_points(self):
+ """
+ Reboot access points in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of the class with updated result, status, and log.
+ Description:
+ This function performs a reboot operation on access points in Cisco Catalyst Center based on the provided IP addresses
+ in the configuration. It retrieves the AP devices' MAC addresses, calls the reboot access points API, and monitors
+ the progress of the reboot operation.
+ """
+
+ device_ips = self.get_device_ips_from_config_priority()
+ input_device_ips = device_ips.copy()
+
+ if input_device_ips:
+ ap_devices = self.get_ap_devices(input_device_ips)
+ self.log("AP Devices from the playbook input are: {0}".format(str(ap_devices)), "INFO")
+ for device_ip in input_device_ips:
+ if device_ip not in ap_devices:
+ input_device_ips.remove(device_ip)
+
+ if not input_device_ips:
+ self.msg = "No AP Devices IP given in the playbook so can't perform reboot operation"
+ self.status = "success"
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ self.log(self.msg, "WARNING")
+ return self
+
+ # Get and store the apEthernetMacAddress of given devices
+ ap_mac_address_list = []
+ for device_ip in input_device_ips:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params={"managementIpAddress": device_ip}
+ )
+ response = response.get('response')
+ if not response:
+ continue
+
+ response = response[0]
+ ap_mac_address = response.get('apEthernetMacAddress')
+
+ if ap_mac_address is not None:
+ ap_mac_address_list.append(ap_mac_address)
+
+ if not ap_mac_address_list:
+ self.status = "success"
+ self.result['changed'] = False
+ self.msg = "Cannot find the AP devices for rebooting"
+ self.result['response'] = self.msg
+ self.log(self.msg, "INFO")
+ return self
+
+ # Now call the Reboot Access Point API
+ reboot_params = {
+ "apMacAddresses": ap_mac_address_list
+ }
+ response = self.dnac._exec(
+ family="wireless",
+ function='reboot_access_points',
+ op_modifies=True,
+ params=reboot_params,
+ )
+ self.log(str(response))
+
+ if response and isinstance(response, dict):
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+
+ if 'url' in execution_details.get("progress"):
+ self.status = "success"
+ self.result['changed'] = True
+ self.result['response'] = execution_details
+ self.msg = "AP Device(s) {0} successfully rebooted!".format(str(input_device_ips))
+ self.log(self.msg, "INFO")
+ break
+ elif execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "AP Device Rebooting get failed because of {0}".format(failure_reason)
+ else:
+ self.msg = "AP Device Rebooting get failed"
+ self.log(self.msg, "ERROR")
+ break
+
+ return self
+
+ def handle_successful_provisioning(self, device_ip, execution_details, device_type):
+ """
+ Handle successful provisioning of Wired/Wireless device.
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - device_ip (str): The IP address of the provisioned device.
+ - execution_details (str): Details of the provisioning execution.
+ - device_type (str): The type or category of the provisioned device(Wired/Wireless).
+ Return:
+ None
+ Description:
+ This method updates the status, result, and logs the successful provisioning of a device.
+ """
+
+ self.status = "success"
+ self.result['changed'] = True
+ self.result['response'] = execution_details
+ self.log("{0} Device {1} provisioned successfully!!".format(device_type, device_ip), "INFO")
+
+ def handle_failed_provisioning(self, device_ip, execution_details, device_type):
+ """
+ Handle failed provisioning of Wired/Wireless device.
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - device_ip (str): The IP address of the device that failed provisioning.
+ - execution_details (dict): Details of the failed provisioning execution in key "failureReason" indicating reason for failure.
+ - device_type (str): The type or category of the provisioned device(Wired/Wireless).
+ Return:
+ None
+ Description:
+ This method updates the status, result, and logs the failure of provisioning for a device.
+ """
+
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason", "Unknown failure reason")
+ self.msg = "{0} Device Provisioning failed for {1} because of {2}".format(device_type, device_ip, failure_reason)
+ self.log(self.msg, "WARNING")
+
+ def handle_provisioning_exception(self, device_ip, exception, device_type):
+ """
+ Handle an exception during the provisioning process of Wired/Wireless device..
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - device_ip (str): The IP address of the device involved in provisioning.
+ - exception (Exception): The exception raised during provisioning.
+ - device_type (str): The type or category of the provisioned device(Wired/Wireless).
+ Return:
+ None
+ Description:
+ This method logs an error message indicating an exception occurred during the provisioning process for a device.
+ """
+
+ error_message = "Error while Provisioning the {0} device {1} in Cisco Catalyst Center: {2}".format(device_type, device_ip, str(exception))
+ self.log(error_message, "ERROR")
+
+ def handle_all_already_provisioned(self, device_ips, device_type):
+ """
+ Handle successful provisioning for all devices(Wired/Wireless).
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - device_type (str): The type or category of the provisioned device(Wired/Wireless).
+ Return:
+ None
+ Description:
+ This method updates the status, result, and logs the successful provisioning for all devices(Wired/Wireless).
+ """
+
+ self.status = "success"
+ self.msg = "All the {0} Devices '{1}' given in the playbook are already Provisioned".format(device_type, str(device_ips))
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ self.result['changed'] = False
+
+ def handle_all_provisioned(self, device_type):
+ """
+ Handle successful provisioning for all devices(Wired/Wireless).
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - device_type (str): The type or category of the provisioned devices(Wired/Wireless).
+ Return:
+ None
+ Description:
+ This method updates the status, result, and logs the successful provisioning for all devices(Wired/Wireless).
+ """
+
+ self.status = "success"
+ self.result['changed'] = True
+ self.log("All {0} Devices provisioned successfully!!".format(device_type), "INFO")
+
+ def handle_all_failed_provision(self, device_type):
+ """
+ Handle failure of provisioning for all devices(Wired/Wireless).
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - device_type (str): The type or category of the devices(Wired/Wireless).
+ Return:
+ None
+ Description:
+ This method updates the status and logs a failure message indicating that
+ provisioning failed for all devices of a specific type.
+ """
+
+ self.status = "failed"
+ self.msg = "{0} Device Provisioning failed for all devices".format(device_type)
+ self.log(self.msg, "INFO")
+
+ def handle_partially_provisioned(self, provision_count, device_type):
+ """
+ Handle partial success in provisioning for devices(Wired/Wireless).
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - provision_count (int): The count of devices that were successfully provisioned.
+ - device_type (str): The type or category of the provisioned devices(Wired/Wireless).
+ Return:
+ None
+ Description:
+ This method updates the status, result, and logs a partial success message indicating that provisioning was successful
+ for a certain number of devices(Wired/Wireless).
+ """
+
+ self.status = "success"
+ self.result['changed'] = True
+ self.log("{0} Devices provisioned successfully partially for {1} devices".format(device_type, provision_count), "INFO")
+
    def provisioned_wired_device(self):
        """
        Provision wired devices in Cisco Catalyst Center.
        Parameters:
            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        Returns:
            self (object): An instance of the class with updated result, status, and log.
        Description:
            This function provisions wired devices in Cisco Catalyst Center based on the configuration provided.
            It retrieves the site name and IP addresses of the devices from the list of configuration,
            attempts to provision each device with site, and monitors the provisioning process.
        """

        provision_wired_list = self.config[0]['provision_wired_device']
        total_devices_to_provisioned = len(provision_wired_list)
        device_ip_list = []
        # Per-run counters; compared against the total at the end to decide
        # which summary handler to invoke.
        provision_count, already_provision_count = 0, 0

        for prov_dict in provision_wired_list:
            managed_flag = False
            device_ip = prov_dict['device_ip']
            device_ip_list.append(device_ip)
            site_name = prov_dict['site_name']
            device_type = "Wired"
            resync_retry_count = prov_dict.get("resync_retry_count", 200)
            # This resync retry interval will be in seconds which will check device status at given interval
            resync_retry_interval = prov_dict.get("resync_retry_interval", 2)

            if not site_name or not device_ip:
                self.status = "failed"
                self.msg = "Site and Device IP are required for Provisioning of Wired Devices."
                self.log(self.msg, "ERROR")
                self.result['response'] = self.msg
                return self

            provision_wired_params = {
                'deviceManagementIpAddress': device_ip,
                'siteNameHierarchy': site_name
            }

            # Check till device comes into managed state
            # (bounded by resync_retry_count iterations of resync_retry_interval seconds).
            while resync_retry_count:
                response = self.get_device_response(device_ip)
                self.log("Device is in {0} state waiting for Managed State.".format(response['managementState']), "DEBUG")

                if (
                    response.get('managementState') == "Managed"
                    and response.get('collectionStatus') == "Managed"
                    and response.get("hostname")
                ):
                    msg = """Device '{0}' comes to managed state and ready for provisioning with the resync_retry_count
                        '{1}' left having resync interval of {2} seconds""".format(device_ip, resync_retry_count, resync_retry_interval)
                    self.log(msg, "INFO")
                    managed_flag = True
                    break

                # These collection states are treated as terminal failures -
                # stop waiting instead of burning the remaining retries.
                if response.get('collectionStatus') == "Partial Collection Failure" or response.get('collectionStatus') == "Could Not Synchronize":
                    device_status = response.get('collectionStatus')
                    msg = """Device '{0}' comes to '{1}' state and never goes for provisioning with the resync_retry_count
                        '{2}' left having resync interval of {3} seconds""".format(device_ip, device_status, resync_retry_count, resync_retry_interval)
                    self.log(msg, "INFO")
                    managed_flag = False
                    break

                time.sleep(resync_retry_interval)
                resync_retry_count = resync_retry_count - 1

            if not managed_flag:
                # Skip this device but keep processing the rest of the list.
                self.log("""Device {0} is not transitioning to the managed state, so provisioning operation cannot
                            be performed.""".format(device_ip), "WARNING")
                continue

            try:
                response = self.dnac._exec(
                    family="sda",
                    function='provision_wired_device',
                    op_modifies=True,
                    params=provision_wired_params,
                )

                if response.get("status") == "failed":
                    description = response.get("description")
                    error_msg = "Cannot do Provisioning for device {0} beacuse of {1}".format(device_ip, description)
                    self.log(error_msg)
                    continue

                task_id = response.get("taskId")

                # Poll the provisioning task until it reports success
                # ('TASK_PROVISION' in progress) or an error.
                while True:
                    execution_details = self.get_task_details(task_id)
                    progress = execution_details.get("progress")

                    if 'TASK_PROVISION' in progress:
                        self.handle_successful_provisioning(device_ip, execution_details, device_type)
                        provision_count += 1
                        break
                    elif execution_details.get("isError"):
                        self.handle_failed_provisioning(device_ip, execution_details, device_type)
                        break

            except Exception as e:
                # Not returning from here as there might be possiblity that for some devices it comes into exception
                # but for others it gets provision successfully or If some devices are already provsioned
                self.handle_provisioning_exception(device_ip, e, device_type)
                if "already provisioned" in str(e):
                    self.log(str(e), "INFO")
                    already_provision_count += 1

        # Check If all the devices are already provsioned, return from here only
        if already_provision_count == total_devices_to_provisioned:
            self.handle_all_already_provisioned(device_ip_list, device_type)
        elif provision_count == total_devices_to_provisioned:
            self.handle_all_provisioned(device_type)
        elif provision_count == 0:
            self.handle_all_failed_provision(device_type)
        else:
            self.handle_partially_provisioned(provision_count, device_type)

        return self
+
def get_wireless_param(self, prov_dict):
    """
    Get wireless provisioning parameters for a device.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        prov_dict (dict): A dictionary containing configuration parameters for wireless provisioning.
    Returns:
        self (object): The class instance. On success 'self.wireless_param' holds a list with one
            dictionary of wireless provisioning parameters and 'self.status' is "success"; on any
            failure 'self.status' is "failed" and 'self.msg' describes the reason.
    Description:
        This function constructs a list containing a dictionary with wireless provisioning parameters based on the
        configuration provided in the playbook. It validates the managed AP locations, ensuring they are of type "floor".
        The function then queries Cisco Catalyst Center to get network device details using the provided device IP.
        If the device is not found, the function returns the class instance with appropriate status and log messages,
        otherwise it stores the wireless provisioning parameters containing site information, managed AP
        locations, dynamic interfaces, and device name.
    """

    # Resolve the IP before the try block so the except handler can always reference it.
    device_ip_address = prov_dict.get('device_ip')

    try:
        site_name = prov_dict['site_name']

        wireless_param = [
            {
                'site': site_name,
                'managedAPLocations': prov_dict['managed_ap_locations'],
            }
        ]

        # Every managed AP location must resolve to a site of type "floor".
        for ap_loc in wireless_param[0]["managedAPLocations"]:
            if self.get_site_type(site_name=ap_loc) != "floor":
                self.status = "failed"
                self.msg = "Managed AP Location must be a floor"
                self.log(self.msg, "ERROR")
                return self

        wireless_param[0]["dynamicInterfaces"] = []

        # 'dynamic_interfaces' may be absent from the playbook; guard against iterating None.
        for interface in prov_dict.get("dynamic_interfaces") or []:
            interface_dict = {
                "interfaceIPAddress": interface.get("interface_ip_address"),
                "interfaceNetmaskInCIDR": interface.get("interface_netmask_in_cidr"),
                "interfaceGateway": interface.get("interface_gateway"),
                "lagOrPortNumber": interface.get("lag_or_port_number"),
                "vlanId": interface.get("vlan_id"),
                "interfaceName": interface.get("interface_name")
            }
            wireless_param[0]["dynamicInterfaces"].append(interface_dict)

        response = self.dnac_apply['exec'](
            family="devices",
            function='get_network_device_by_ip',
            params={"ip_address": device_ip_address}
        )

        if not response:
            self.status = "failed"
            self.msg = "Device Host name is not present in the Cisco Catalyst Center"
            self.log(self.msg, "INFO")
            return self

        response = response.get("response")
        wireless_param[0]["deviceName"] = response.get("hostname")
        self.wireless_param = wireless_param
        self.status = "success"
        self.log("Successfully collected all the parameters required for Wireless Provisioning", "DEBUG")

    except Exception as e:
        # Mark the operation as failed so a subsequent check_return_status() aborts
        # instead of proceeding with stale or partial provisioning parameters.
        self.status = "failed"
        self.msg = """An exception occurred while fetching the details for wireless provisioning of
            device '{0}' due to - {1}""".format(device_ip_address, str(e))
        self.log(self.msg, "ERROR")

    return self
+
def get_site_type(self, site_name):
    """
    Get the type of a site in Cisco Catalyst Center.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        site_name (str): The name of the site for which to retrieve the type.
    Returns:
        site_type (str or None): The type of the specified site (e.g. "floor"), or None if the
            site is not found or carries no "Location" attributes.
    Description:
        This function queries Cisco Catalyst Center to retrieve the type of a specified site. It uses the
        get_site API with the provided site name, extracts the site type from the response, and returns it.
        If the specified site is not found, the function returns None, and an appropriate log message is generated.
        Any API or parsing error aborts the module via fail_json.
    """

    try:
        site_type = None
        response = self.dnac_apply['exec'](
            family="sites",
            function='get_site',
            params={"name": site_name},
        )

        if not response:
            self.msg = "Site '{0}' not found".format(site_name)
            self.log(self.msg, "INFO")
            return site_type

        self.log("Received API response from 'get_site': {0}".format(str(response)), "DEBUG")
        site = response.get("response")
        site_additional_info = site[0].get("additionalInfo")

        # The site type is stored under the "Location" namespace of the additional info.
        for item in site_additional_info:
            if item["nameSpace"] == "Location":
                site_type = item.get("attributes").get("type")

    except Exception as e:
        # Log the underlying exception before aborting; previously it was silently
        # discarded, which made real API/parsing failures impossible to diagnose.
        self.log("Exception occurred while fetching the type of site '{0}': {1}".format(site_name, str(e)), "ERROR")
        self.msg = "Error while fetching the site '{0}' and the specified site was not found in Cisco Catalyst Center.".format(site_name)
        self.module.fail_json(msg=self.msg, response=[self.msg])

    return site_type
+
def provisioned_wireless_devices(self):
    """
    Provision Wireless devices in Cisco Catalyst Center.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
    Returns:
        self (object): An instance of the class with updated result, status, and log.
    Description:
        This function performs wireless provisioning for the provided list of device IP addresses.
        It iterates through each device, retrieves provisioning parameters using the get_wireless_param function,
        waits (with configurable retry count/interval) for the device to reach the "Managed" state, and then
        calls the Cisco Catalyst Center API for wireless provisioning. If all devices are already provisioned,
        it returns success with a relevant message.
    """

    provision_count, already_provision_count = 0, 0
    device_type = "Wireless"
    device_ip_list = []
    provision_wireless_list = self.config[0]['provision_wireless_device']

    for prov_dict in provision_wireless_list:
        # Resolve the IP up front so the except block below can always reference it,
        # even if parameter collection raises before the original assignment point.
        device_ip = prov_dict.get('device_ip')
        try:
            # Collect the device parameters from the playbook to perform wireless provisioning
            self.get_wireless_param(prov_dict).check_return_status()
            device_ip_list.append(device_ip)
            provisioning_params = self.wireless_param
            resync_retry_count = prov_dict.get("resync_retry_count", 200)
            # This resync retry interval will be in seconds which will check device status at given interval
            resync_retry_interval = prov_dict.get("resync_retry_interval", 2)
            managed_flag = True

            # Check till device comes into managed state
            while resync_retry_count:
                response = self.get_device_response(device_ip)
                # Use .get() so a missing 'managementState' key cannot raise while merely logging.
                self.log("Device is in {0} state waiting for Managed State.".format(response.get('managementState')), "DEBUG")

                if (
                    response.get('managementState') == "Managed"
                    and response.get('collectionStatus') == "Managed"
                    and response.get("hostname")
                ):
                    msg = """Device '{0}' comes to managed state and ready for provisioning with the resync_retry_count
                        '{1}' left having resync interval of {2} seconds""".format(device_ip, resync_retry_count, resync_retry_interval)
                    self.log(msg, "INFO")
                    managed_flag = True
                    break

                if response.get('collectionStatus') in ("Partial Collection Failure", "Could Not Synchronize"):
                    device_status = response.get('collectionStatus')
                    msg = """Device '{0}' comes to '{1}' state and never goes for provisioning with the resync_retry_count
                        '{2}' left having resync interval of {3} seconds""".format(device_ip, device_status, resync_retry_count, resync_retry_interval)
                    self.log(msg, "INFO")
                    managed_flag = False
                    break

                time.sleep(resync_retry_interval)
                resync_retry_count = resync_retry_count - 1

            if not managed_flag:
                self.log("""Device {0} is not transitioning to the managed state, so provisioning operation cannot
                    be performed.""".format(device_ip), "WARNING")
                continue

            # Now we have provisioning_params so we can do wireless provisioning
            response = self.dnac_apply['exec'](
                family="wireless",
                function="provision",
                op_modifies=True,
                params=provisioning_params,
            )

            if response.get("status") == "failed":
                description = response.get("description")
                # Fixed typo "beacuse" -> "because" in the logged error message.
                error_msg = "Cannot do Provisioning for Wireless device {0} because of {1}".format(device_ip, description)
                self.log(error_msg, "ERROR")
                continue

            task_id = response.get("taskId")

            while True:
                execution_details = self.get_task_details(task_id)
                progress = execution_details.get("progress")
                # 'progress' can be None; guard before the substring check to avoid a TypeError.
                if progress and 'TASK_PROVISION' in progress:
                    self.handle_successful_provisioning(device_ip, execution_details, device_type)
                    provision_count += 1
                    break
                elif execution_details.get("isError"):
                    self.handle_failed_provisioning(device_ip, execution_details, device_type)
                    break

        except Exception as e:
            # Not returning from here as there might be a possibility that for some devices it comes into exception
            # but for others it gets provisioned successfully, or some devices are already provisioned
            self.handle_provisioning_exception(device_ip, e, device_type)
            if "already provisioned" in str(e):
                self.msg = "Device '{0}' already provisioned".format(device_ip)
                self.log(self.msg, "INFO")
                already_provision_count += 1

    # Check if all the devices are already provisioned, return from here only
    if already_provision_count == len(device_ip_list):
        self.handle_all_already_provisioned(device_ip_list, device_type)
    elif provision_count == len(device_ip_list):
        self.handle_all_provisioned(device_type)
    elif provision_count == 0:
        self.handle_all_failed_provision(device_type)
    else:
        self.handle_partially_provisioned(provision_count, device_type)

    return self
+
def get_udf_id(self, field_name):
    """
    Get the ID of a Global User Defined Field in Cisco Catalyst Center based on its name.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        field_name (str): The name of the Global User Defined Field.
    Returns:
        str or None: The ID of the first matching Global User Defined Field, or None when no
            field matches or the lookup raises an error.
    Description:
        Queries Cisco Catalyst Center for all Global User Defined Fields with the given name and
        extracts the ID of the first match. Errors are logged and swallowed.
    """

    udf_id = None
    try:
        api_response = self.dnac._exec(
            family="devices",
            function='get_all_user_defined_fields',
            params={"name": field_name},
        )
        self.log("Received API response from 'get_all_user_defined_fields': {0}".format(str(api_response)), "DEBUG")
        udf_entries = api_response.get("response")
        if udf_entries:
            udf_id = udf_entries[0].get("id")
    except Exception as e:
        error_message = "Exception occurred while getting Global User Defined Fields(UDF) ID from Cisco Catalyst Center: {0}".format(str(e))
        self.log(error_message, "ERROR")

    return udf_id
+
def mandatory_parameter(self):
    """
    Check for and validate mandatory parameters for adding network devices in Cisco Catalyst Center.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
    Returns:
        self (object): The instance with 'status', 'msg' (and 'result' on failure) updated to
            reflect whether all mandatory parameters are present.
            (The previous docstring incorrectly claimed a dict was returned.)
    Description:
        It will check the mandatory parameters for adding the devices in Cisco Catalyst Center,
        based on the device 'type' declared in the playbook configuration.
    """

    device_type = self.config[0].get("type", "NETWORK_DEVICE")
    # Required playbook keys per device type.
    params_dict = {
        "NETWORK_DEVICE": ["ip_address_list", "password", "username"],
        "COMPUTE_DEVICE": ["ip_address_list", "http_username", "http_password", "http_port"],
        "MERAKI_DASHBOARD": ["http_password"],
        "FIREPOWER_MANAGEMENT_SYSTEM": ["ip_address_list", "http_username", "http_password"],
        "THIRD_PARTY_DEVICE": ["ip_address_list"]
    }

    params_list = params_dict.get(device_type, [])

    # Collect every required key that is missing from the playbook config.
    mandatory_params_absent = [param for param in params_list if param not in self.config[0]]

    if mandatory_params_absent:
        self.status = "failed"
        self.msg = "Required parameters {0} for adding devices are not present".format(str(mandatory_params_absent))
        self.result['msg'] = self.msg
        self.log(self.msg, "ERROR")
    else:
        self.status = "success"
        self.msg = "Required parameter for Adding the devices in Inventory are present."
        self.log(self.msg, "INFO")

    return self
+
def get_have(self, config):
    """
    Retrieve and check device information with Cisco Catalyst Center to determine if devices already exist.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        config (dict): A dictionary containing the configuration details of devices to be checked.
    Returns:
        self (object): The instance with 'self.have' populated with:
            - "want_device": devices specified in the playbook.
            - "device_in_dnac": devices that already exist in Cisco Catalyst Center.
            - "device_not_in_dnac": devices not present in Cisco Catalyst Center.
            - "devices_in_playbook": every device IP referenced anywhere in the playbook.
    Description:
        Compares the devices specified in the playbook (including wired and, when supported,
        wireless provisioning entries) against the devices that currently exist in Cisco
        Catalyst Center.
    """

    want_device = self.get_device_ips_from_config_priority()

    # Devices currently present in Cisco Catalyst Center
    device_in_dnac = self.device_exists_in_dnac()
    devices_in_playbook = list(want_device)
    device_not_in_dnac = [ip for ip in want_device if ip not in device_in_dnac]

    wired_prov_list = self.config[0].get('provision_wired_device')
    if wired_prov_list:
        for wired_entry in wired_prov_list:
            wired_ip = wired_entry['device_ip']
            if wired_ip not in want_device:
                devices_in_playbook.append(wired_ip)
            if wired_ip not in device_in_dnac:
                device_not_in_dnac.append(wired_ip)

    if support_for_provisioning_wireless:
        wireless_prov_list = self.config[0].get('provision_wireless_device')
        if wireless_prov_list:
            for wireless_entry in wireless_prov_list:
                wireless_ip = wireless_entry['device_ip']
                if wireless_ip not in want_device and wireless_ip not in devices_in_playbook:
                    devices_in_playbook.append(wireless_ip)
                if wireless_ip not in device_in_dnac and wireless_ip not in device_not_in_dnac:
                    device_not_in_dnac.append(wireless_ip)

    self.log("Device(s) {0} exists in Cisco Catalyst Center".format(str(device_in_dnac)), "INFO")
    have = {
        "want_device": want_device,
        "device_in_dnac": device_in_dnac,
        "device_not_in_dnac": device_not_in_dnac,
        "devices_in_playbook": devices_in_playbook,
    }

    self.have = have
    self.log("Current State (have): {0}".format(str(self.have)), "INFO")

    return self
+
def get_device_params(self, params):
    """
    Extract and store device parameters from the playbook for device processing in Cisco Catalyst Center.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        params (dict): A dictionary containing device parameters retrieved from the playbook.
    Returns:
        dict: A dictionary mapping Catalyst Center API field names to the corresponding playbook
            values (missing playbook keys yield None).
    Description:
        Translates playbook-style (snake_case) device settings into the camelCase payload expected
        by Cisco Catalyst Center for adding, updating, editing, or deleting devices.
    """

    # (api_field, playbook_field) pairs; insertion order is preserved in the payload.
    field_map = (
        ("cliTransport", "cli_transport"),
        ("enablePassword", "enable_password"),
        ("password", "password"),
        ("ipAddress", "ip_address_list"),
        ("snmpAuthPassphrase", "snmp_auth_passphrase"),
        ("snmpAuthProtocol", "snmp_auth_protocol"),
        ("snmpMode", "snmp_mode"),
        ("snmpPrivPassphrase", "snmp_priv_passphrase"),
        ("snmpPrivProtocol", "snmp_priv_protocol"),
        ("snmpROCommunity", "snmp_ro_community"),
        ("snmpRWCommunity", "snmp_rw_community"),
        ("snmpRetry", "snmp_retry"),
        ("snmpTimeout", "snmp_timeout"),
        ("snmpUserName", "snmp_username"),
        ("userName", "username"),
        ("computeDevice", "compute_device"),
        ("extendedDiscoveryInfo", "extended_discovery_info"),
        ("httpPassword", "http_password"),
        ("httpPort", "http_port"),
        ("httpSecure", "http_secure"),
        ("httpUserName", "http_username"),
        ("netconfPort", "netconf_port"),
        ("snmpVersion", "snmp_version"),
        ("type", "type"),
        ("updateMgmtIPaddressList", "update_mgmt_ipaddresslist"),
        ("forceSync", "force_sync"),
        ("cleanConfig", "clean_config"),
    )
    device_param = {api_key: params.get(playbook_key) for api_key, playbook_key in field_map}

    # Normalize the management-IP update entry from snake_case to the API's camelCase keys.
    mgmt_ip_entries = device_param.get("updateMgmtIPaddressList")
    if mgmt_ip_entries:
        first_entry = mgmt_ip_entries[0]
        device_param["updateMgmtIPaddressList"][0] = {
            "existMgmtIpAddress": first_entry.get("exist_mgmt_ipaddress"),
            "newMgmtIpAddress": first_entry.get("new_mgmt_ipaddress"),
        }

    return device_param
+
def get_device_ids(self, device_ips):
    """
    Get the list of unique device IDs for list of specified management IP addresses of devices in Cisco Catalyst Center.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        device_ips (list): The management IP addresses of devices for which you want to retrieve the device IDs.
    Returns:
        list: The device IDs found for the specified IP addresses (IPs that cannot be resolved are skipped).
    Description:
        Queries Cisco Catalyst Center for each management IP and collects the ID of the first
        matching device record. Lookup errors are logged per device and do not abort the loop.
    """

    device_ids = []

    for ip_address in device_ips:
        try:
            api_response = self.dnac._exec(
                family="devices",
                function='get_device_list',
                params={"managementIpAddress": ip_address}
            )

            if not api_response:
                continue

            self.log("Received API response from 'get_device_list': {0}".format(str(api_response)), "DEBUG")
            device_records = api_response.get("response")
            if device_records:
                device_ids.append(device_records[0]["id"])

        except Exception as e:
            error_message = "Error while fetching device '{0}' from Cisco Catalyst Center: {1}".format(ip_address, str(e))
            self.log(error_message, "ERROR")

    return device_ids
+
def get_device_ips_from_hostname(self, hostname_list):
    """
    Get the list of unique device IPs for list of specified hostnames of devices in Cisco Catalyst Center.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        hostname_list (list): The hostnames of devices for which you want to retrieve the device IPs.
    Returns:
        list: The management IPs found for the given hostnames (unresolvable hostnames are skipped).
    Description:
        Queries Cisco Catalyst Center per hostname and collects the management IP of the first
        matching device record. Errors are logged and do not abort the loop.
    """

    device_ips = []
    for device_hostname in hostname_list:
        try:
            api_response = self.dnac._exec(
                family="devices",
                function='get_device_list',
                params={"hostname": device_hostname}
            )
            if not api_response:
                continue
            self.log("Received API response from 'get_device_list': {0}".format(str(api_response)), "DEBUG")
            device_records = api_response.get("response")
            if not device_records:
                continue
            management_ip = device_records[0]["managementIpAddress"]
            if management_ip:
                device_ips.append(management_ip)
        except Exception as e:
            error_message = "Exception occurred while fetching device from Cisco Catalyst Center: {0}".format(str(e))
            self.log(error_message, "ERROR")

    return device_ips
+
def get_device_ips_from_serial_number(self, serial_number_list):
    """
    Get the list of unique device IPs for a specified list of serial numbers in Cisco Catalyst Center.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        serial_number_list (list): The list of serial number of devices for which you want to retrieve the device IPs.
    Returns:
        list: The management IPs found for the given serial numbers (unresolvable entries are skipped).
    Description:
        Queries Cisco Catalyst Center per serial number and collects the management IP of the first
        matching device record. Errors are logged and do not abort the loop.
    """

    device_ips = []
    for serial in serial_number_list:
        try:
            api_response = self.dnac._exec(
                family="devices",
                function='get_device_list',
                params={"serialNumber": serial}
            )
            if not api_response:
                continue
            self.log("Received API response from 'get_device_list': {0}".format(str(api_response)), "DEBUG")
            device_records = api_response.get("response")
            if not device_records:
                continue
            management_ip = device_records[0]["managementIpAddress"]
            if management_ip:
                device_ips.append(management_ip)
        except Exception as e:
            error_message = "Exception occurred while fetching device from Cisco Catalyst Center - {0}".format(str(e))
            self.log(error_message, "ERROR")

    return device_ips
+
def get_device_ips_from_mac_address(self, mac_address_list):
    """
    Get the list of unique device IPs for list of specified mac address of devices in Cisco Catalyst Center.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        mac_address_list (list): The list of mac address of devices for which you want to retrieve the device IPs.
    Returns:
        list: The management IPs found for the given MAC addresses (unresolvable entries are skipped).
    Description:
        Queries Cisco Catalyst Center per MAC address and collects the management IP of the first
        matching device record. Errors are logged and do not abort the loop.
    """

    device_ips = []
    for mac in mac_address_list:
        try:
            api_response = self.dnac._exec(
                family="devices",
                function='get_device_list',
                params={"macAddress": mac}
            )
            if not api_response:
                continue
            self.log("Received API response from 'get_device_list': {0}".format(str(api_response)), "DEBUG")
            device_records = api_response.get("response")
            if not device_records:
                continue
            management_ip = device_records[0]["managementIpAddress"]
            if management_ip:
                device_ips.append(management_ip)
        except Exception as e:
            error_message = "Exception occurred while fetching device from Cisco Catalyst Center - {0}".format(str(e))
            self.log(error_message, "ERROR")

    return device_ips
+
def get_interface_from_id_and_name(self, device_id, interface_name):
    """
    Retrieve the interface ID for a device in Cisco Catalyst Center based on device id and interface name.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        device_id (str): The id of the device.
        interface_name (str): Name of the interface for which details need to be collected.
    Returns:
        str: The interface ID, when the API returns a matching interface.
        None: Implicitly, when the API call succeeds but returns an empty response.
        self (object): On exception, with 'status' set to "failed" and 'msg' describing the error.
    Description:
        The function sends a request to Cisco Catalyst Center to retrieve the interface information
        for the device with the provided device id and interface name and extracts the interface ID from the
        response, and returns the interface ID.
        NOTE(review): the mixed return types (str / None / self) force callers to type-check the
        result — confirm whether any caller relies on the 'self' return before unifying this contract.
    """

    try:
        interface_detail_params = {
            'device_id': device_id,
            'name': interface_name
        }
        response = self.dnac._exec(
            family="devices",
            function='get_interface_details',
            params=interface_detail_params
        )
        self.log("Received API response from 'get_interface_details': {0}".format(str(response)), "DEBUG")
        response = response.get("response")

        if response:
            self.status = "success"
            interface_id = response["id"]
            self.log("""Successfully fetched interface ID ({0}) by using device id {1} and interface name {2}."""
                     .format(interface_id, device_id, interface_name), "INFO")
            return interface_id

        # NOTE(review): control falls through here when the API returns no interface details,
        # implicitly returning None without updating self.status.

    except Exception as e:
        error_message = "Error while fetching interface id for interface({0}) from Cisco Catalyst Center: {1}".format(interface_name, str(e))
        self.log(error_message, "ERROR")
        self.msg = error_message
        self.status = "failed"
        return self
+
def get_interface_from_ip(self, device_ip):
    """
    Get the interface ID for a device in Cisco Catalyst Center based on its IP address.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        device_ip (str): The IP address of the device.
    Returns:
        str: The interface ID for the specified device (implicitly None when the API
            returns an empty response).
    Raises:
        Exception: When the underlying API call fails; the error is logged first.
    Description:
        The function sends a request to Cisco Catalyst Center to retrieve the interface information
        for the device with the provided IP address and extracts the interface ID from the
        response, and returns the interface ID.
    """

    try:
        response = self.dnac._exec(
            family="devices",
            function='get_interface_by_ip',
            params={"ip_address": device_ip}
        )
        self.log("Received API response from 'get_interface_by_ip': {0}".format(str(response)), "DEBUG")
        response = response.get("response")

        if response:
            interface_id = response[0]["id"]
            # Pass an explicit severity for consistency with every other log call in this module.
            self.log("Fetch Interface Id for device '{0}' successfully !!".format(device_ip), "INFO")
            return interface_id

    except Exception as e:
        error_message = "Error while fetching Interface Id for device '{0}' from Cisco Catalyst Center: {1}".format(device_ip, str(e))
        self.log(error_message, "ERROR")
        raise Exception(error_message)
+
def get_device_response(self, device_ip):
    """
    Fetch the details of a single device from Cisco Catalyst Center by management IP.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        device_ip (str): The management IP address of the device for which the response is to be retrieved.
    Returns:
        dict: The first device record returned by the 'get_device_list' API for this IP.
    Raises:
        Exception: When the API call fails or returns no usable record; the error is logged first.
    Description:
        Executes the 'get_device_list' API call with the provided device IP address and returns
        the first entry of its 'response' payload.
    """

    try:
        api_response = self.dnac._exec(
            family="devices",
            function='get_device_list',
            params={"managementIpAddress": device_ip}
        )
        device_info = api_response.get('response')[0]

    except Exception as e:
        error_message = "Error while getting the response of device from Cisco Catalyst Center: {0}".format(str(e))
        self.log(error_message, "ERROR")
        raise Exception(error_message)

    return device_info
+
def check_device_role(self, device_ip):
    """
    Check whether a device's role in Cisco Catalyst Center matches the role in the playbook configuration.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        device_ip (str): The management IP address of the device for which the device role is to be checked.
    Returns:
        bool: True if the device's current role equals the configured 'role', False otherwise.
    Description:
        Fetches the device record via 'get_device_response' and compares its 'role' field against
        the 'role' value from the playbook configuration.
    """

    desired_role = self.config[0].get('role')
    device_details = self.get_device_response(device_ip)

    return device_details.get('role') == desired_role
+
def check_interface_details(self, device_ip, interface_name):
    """
    Checks if the interface details for a device in Cisco Catalyst Center match the specified values in the configuration.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        device_ip (str): The management IP address of the device for which interface details are to be checked.
        interface_name (str): The name of the interface whose details are compared.
    Returns:
        bool: True if the interface details match the specified values, False otherwise.
    Description:
        This method retrieves the interface details for a device in Cisco Catalyst Center using the
        'get_interface_details' API call and compares them with the 'update_interface_details' values
        from the playbook. Falsy playbook values (None, empty string) are treated as "not specified"
        and skipped, so only explicitly configured parameters are compared.
    """
    device_id = self.get_device_ids([device_ip])

    if not device_id:
        self.log("""Error: Device with IP '{0}' not found in Cisco Catalyst Center.Unable to update interface details."""
                 .format(device_ip), "ERROR")
        return False

    interface_detail_params = {
        'device_id': device_id[0],
        'name': interface_name
    }
    response = self.dnac._exec(
        family="devices",
        function='get_interface_details',
        params=interface_detail_params
    )
    self.log("Received API response from 'get_interface_details': {0}".format(str(response)), "DEBUG")
    response = response.get("response")

    if not response:
        self.log("No response received from the API 'get_interface_details'.", "DEBUG")
        return False

    # 'vlanId' may be absent/None in the API response; int(None) would raise a TypeError
    # and crash the module, so convert only when a value is present.
    vlan_id = response.get('vlanId')
    response_params = {
        'description': response.get('description'),
        'adminStatus': response.get('adminStatus'),
        'voiceVlanId': response.get('voiceVlan'),
        'vlanId': int(vlan_id) if vlan_id is not None else None
    }

    interface_playbook_params = self.config[0].get('update_interface_details')
    playbook_params = {
        'description': interface_playbook_params.get('description', ''),
        'adminStatus': interface_playbook_params.get('admin_status'),
        'voiceVlanId': interface_playbook_params.get('voice_vlan_id', ''),
        'vlanId': interface_playbook_params.get('vlan_id')
    }

    # Only compare parameters the playbook actually specifies (falsy values are skipped).
    for key, value in playbook_params.items():
        if not value:
            continue
        elif response_params[key] != value:
            return False

    return True
+
def check_credential_update(self):
    """
    Checks if the credentials for devices in the configuration match the updated values in Cisco Catalyst Center.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
    Returns:
        bool: True if the credentials match the updated values, False otherwise.
    Description:
        This method triggers the export API in Cisco Catalyst Center to obtain the updated credential details for
        the specified devices. It then decrypts and reads the CSV file containing the updated credentials,
        comparing them with the credentials specified in the configuration.
    """

    device_ips = self.get_device_ips_from_config_priority()
    device_uuids = self.get_device_ids(device_ips)
    # Throw-away password used only to encrypt/decrypt the exported credentials CSV.
    password = "Testing@123"
    payload_params = {"deviceUuids": device_uuids, "password": password, "operationEnum": "0"}
    response = self.trigger_export_api(payload_params)
    self.check_return_status()
    csv_reader = self.decrypt_and_read_csv(response, password)
    self.check_return_status()
    device_data = next(csv_reader, None)

    if not device_data:
        return False

    # Map the exported CSV column names to the playbook parameter names.
    csv_data_dict = {
        'snmp_retry': device_data['snmp_retries'],
        'username': device_data['cli_username'],
        'password': device_data['cli_password'],
        'enable_password': device_data['cli_enable_password'],
        'snmp_username': device_data['snmpv3_user_name'],
        'snmp_auth_protocol': device_data['snmpv3_auth_type'],
    }

    config = self.config[0]
    for key in csv_data_dict:
        if key in config and csv_data_dict[key] is not None:
            if key == "snmp_retry":
                # Compare numerically: the CSV value is a string while the playbook value may
                # be an int. The previous 'if ... and ...: / elif' chain fell through to the
                # string comparison when the numeric values were EQUAL ('3' != 3) and wrongly
                # reported a mismatch for matching snmp_retry values.
                if int(csv_data_dict[key]) != int(config[key]):
                    return False
            elif csv_data_dict[key] != config[key]:
                return False

    return True
+
def get_provision_wired_device(self, device_ip):
    """
    Retrieves the provisioning status of a wired device with the specified management IP address in Cisco Catalyst Center.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        device_ip (str): The management IP address of the wired device for which provisioning status is to be retrieved.
    Returns:
        bool: True if the device is reported as provisioned, False when the API reports a failed status.
    Description:
        Executes the 'get_provisioned_wired_device' API call with the provided device IP address,
        logs a failure response, and converts the API status into a boolean.
    """

    api_response = self.dnac._exec(
        family="sda",
        function='get_provisioned_wired_device',
        op_modifies=True,
        params={"device_management_ip_address": device_ip}
    )

    if api_response.get("status") == "failed":
        self.log("Cannot do provisioning for wired device {0} because of {1}.".format(device_ip, api_response.get('description')), "ERROR")
        return False

    return True
+
+ def clear_mac_address(self, interface_id, deploy_mode, interface_name):
+ """
+ Clear the MAC address table on a specific interface of a device.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ interface_id (str): The UUID of the interface where the MAC addresses will be cleared.
+ deploy_mode (str): The deployment mode of the device.
+ interface_name(str): The name of the interface for which the MAC addresses will be cleared.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This function clears the MAC address table on a specific interface of a device.
+ The 'deploy_mode' parameter specifies the deployment mode of the device.
+ If the operation is successful, the function returns the response from the API call.
+ If an error occurs during the operation, the function logs the error details and updates the status accordingly.
+ """
+
+ try:
+ payload = {
+ "operation": "ClearMacAddress",
+ "payload": {}
+ }
+ clear_mac_address_payload = {
+ 'payload': payload,
+ 'interface_uuid': interface_id,
+ 'deployment_mode': deploy_mode
+ }
+ response = self.dnac._exec(
+ family="devices",
+ function='clear_mac_address_table',
+ op_modifies=True,
+ params=clear_mac_address_payload,
+ )
+ self.log("Received API response from 'clear_mac_address_table': {0}".format(str(response)), "DEBUG")
+
+ if not (response and isinstance(response, dict)):
+ self.status = "failed"
+ self.msg = """Received an empty response from the API 'clear_mac_address_table'. This indicates a failure to clear
+ the Mac address table for the interface '{0}'""".format(interface_name)
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ return self
+
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+
+ if execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "Failed to clear the Mac address table for the interface '{0}' due to {1}".format(interface_name, failure_reason)
+ else:
+ self.msg = "Failed to clear the Mac address table for the interface '{0}'".format(interface_name)
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ break
+ elif 'clear mac address-table' in execution_details.get("data"):
+ self.status = "success"
+ self.result['changed'] = True
+ self.result['response'] = execution_details
+ self.msg = "Successfully executed the task of clearing the Mac address table for interface '{0}'".format(interface_name)
+ self.log(self.msg, "INFO")
+ break
+
+ except Exception as e:
+ error_msg = """An exception occurred during the process of clearing the MAC address table for interface {0}, due to -
+ {1}""".format(interface_name, str(e))
+ self.log(error_msg, "WARNING")
+ self.result['changed'] = False
+ self.result['response'] = error_msg
+
+ return self
+
+ def update_interface_detail_of_device(self, device_to_update):
+ """
+ Update interface details for a device in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ device_to_update (list): A list of IP addresses of devices to be updated.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method updates interface details for devices in Cisco Catalyst Center.
+ It iterates over the list of devices to be updated, retrieves interface parameters from the configuration,
+ calls the update interface details API with the required parameters, and checks the execution response.
+ If the update is successful, it sets the status to 'success' and logs an informational message.
+ """
+
+ # Call the Get interface details by device IP API and fetch the interface Id
+ for device_ip in device_to_update:
+ interface_params = self.config[0].get('update_interface_details')
+ interface_names_list = interface_params.get('interface_name')
+ for interface_name in interface_names_list:
+ device_id = self.get_device_ids([device_ip])
+ interface_id = self.get_interface_from_id_and_name(device_id[0], interface_name)
+ self.check_return_status()
+
+                # Now call the update interface details API with the required parameters
+ try:
+ interface_params = self.config[0].get('update_interface_details')
+ clear_mac_address_table = interface_params.get("clear_mac_address_table", False)
+
+ if clear_mac_address_table:
+ response = self.get_device_response(device_ip)
+
+ if response.get('role').upper() != "ACCESS":
+ self.msg = "The action to clear the MAC Address table is only supported for devices with the ACCESS role."
+ self.log(self.msg, "WARNING")
+ self.result['response'] = self.msg
+ else:
+ deploy_mode = interface_params.get('deployment_mode', 'Deploy')
+ self.clear_mac_address(interface_id, deploy_mode, interface_name)
+ self.check_return_status()
+
+ temp_params = {
+ 'description': interface_params.get('description', ''),
+ 'adminStatus': interface_params.get('admin_status'),
+ 'voiceVlanId': interface_params.get('voice_vlan_id'),
+ 'vlanId': interface_params.get('vlan_id')
+ }
+ payload_params = {}
+ for key, value in temp_params.items():
+ if value is not None:
+ payload_params[key] = value
+
+ update_interface_params = {
+ 'payload': payload_params,
+ 'interface_uuid': interface_id,
+ 'deployment_mode': interface_params.get('deployment_mode', 'Deploy')
+ }
+ response = self.dnac._exec(
+ family="devices",
+ function='update_interface_details',
+ op_modifies=True,
+ params=update_interface_params,
+ )
+ self.log("Received API response from 'update_interface_details': {0}".format(str(response)), "DEBUG")
+
+ if response and isinstance(response, dict):
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+
+ if 'SUCCESS' in execution_details.get("progress"):
+ self.status = "success"
+ self.result['changed'] = True
+ self.result['response'] = execution_details
+ self.msg = "Updated Interface Details for device '{0}' successfully".format(device_ip)
+ self.log(self.msg, "INFO")
+ break
+ elif execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "Interface Updation get failed because of {0}".format(failure_reason)
+ else:
+ self.msg = "Interface Updation get failed"
+ self.log(self.msg, "ERROR")
+ break
+
+ except Exception as e:
+ error_message = "Error while updating interface details in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "INFO")
+ self.status = "success"
+ self.result['changed'] = False
+ self.msg = "Port actions are only supported on user facing/access ports as it's not allowed or No Updation required"
+ self.log(self.msg, "INFO")
+
+ return self
+
+ def check_managementip_execution_response(self, response, device_ip, new_mgmt_ipaddress):
+ """
+ Check the execution response of a management IP update task.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ response (dict): The response received after initiating the management IP update task.
+ device_ip (str): The IP address of the device for which the management IP was updated.
+ new_mgmt_ipaddress (str): The new management IP address of the device.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the execution response of a management IP update task in Cisco Catalyst Center.
+ It continuously queries the task details until the task is completed or an error occurs.
+ If the task is successful, it sets the status to 'success' and logs an informational message.
+ If the task fails, it sets the status to 'failed' and logs an error message with the failure reason, if available.
+ """
+
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+ if execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "Device new management IP updation for device '{0}' get failed due to {1}".format(device_ip, failure_reason)
+ else:
+ self.msg = "Device new management IP updation for device '{0}' get failed".format(device_ip)
+ self.log(self.msg, "ERROR")
+ break
+ elif execution_details.get("endTime"):
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = """Device '{0}' present in Cisco Catalyst Center and new management ip '{1}' have been
+ updated successfully""".format(device_ip, new_mgmt_ipaddress)
+ self.result['response'] = self.msg
+ self.log(self.msg, "INFO")
+ break
+
+ return self
+
+ def check_device_update_execution_response(self, response, device_ip):
+ """
+ Check the execution response of a device update task.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ response (dict): The response received after initiating the device update task.
+ device_ip (str): The IP address of the device for which the update is performed.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the execution response of a device update task in Cisco Catalyst Center.
+ It continuously queries the task details until the task is completed or an error occurs.
+ If the task is successful, it sets the status to 'success' and logs an informational message.
+ If the task fails, it sets the status to 'failed' and logs an error message with the failure reason, if available.
+ """
+
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+
+ if execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "Device Updation for device '{0}' get failed due to {1}".format(device_ip, failure_reason)
+ else:
+ self.msg = "Device Updation for device '{0}' get failed".format(device_ip)
+ self.log(self.msg, "ERROR")
+ break
+ elif execution_details.get("endTime"):
+ self.status = "success"
+ self.result['changed'] = True
+ self.result['response'] = execution_details
+ self.msg = "Device '{0}' present in Cisco Catalyst Center and have been updated successfully".format(device_ip)
+ self.log(self.msg, "INFO")
+ break
+
+ return self
+
+ def is_device_exist_in_ccc(self, device_ip):
+ """
+ Check if a device with the given IP exists in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ device_ip (str): The IP address of the device to check.
+ Returns:
+ bool: True if the device exists, False otherwise.
+ Description:
+ This method queries Cisco Catalyst Center to check if a device with the specified
+ management IP address exists. If the device exists, it returns True; otherwise,
+ it returns False. If an error occurs during the process, it logs an error message
+ and raises an exception.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params={"managementIpAddress": device_ip}
+ )
+ response = response.get('response')
+ if not response:
+ self.log("Device with given IP '{0}' is not present in Cisco Catalyst Center".format(device_ip), "INFO")
+ return False
+
+ return True
+
+ except Exception as e:
+ error_message = "Error while getting the response of device '{0}' from Cisco Catalyst Center: {1}".format(device_ip, str(e))
+ self.log(error_message, "ERROR")
+ raise Exception(error_message)
+
+ def is_device_exist_for_update(self, device_to_update):
+ """
+ Check if the device(s) exist in Cisco Catalyst Center for update operation.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+            device_to_update (list): A list of device(s) to be checked for presence in Cisco Catalyst Center.
+ Returns:
+ bool: True if at least one of the devices to be updated exists in Cisco Catalyst Center,
+ False otherwise.
+ Description:
+ This function checks if any of the devices specified in the 'device_to_update' list
+ exists in Cisco Catalyst Center. It iterates through the list of devices and compares
+ each device with the list of devices present in Cisco Catalyst Center obtained from
+ 'self.have.get("device_in_ccc")'. If a match is found, it sets 'device_exist' to True
+ and breaks the loop.
+ """
+
+        # First check whether the device is present in Cisco Catalyst Center or not
+ device_exist = False
+ for device in device_to_update:
+ if device in self.have.get("device_in_ccc"):
+ device_exist = True
+ break
+
+ return device_exist
+
+ def get_want(self, config):
+ """
+        Get all the device-related information from the playbook that is needed to
+        add/update/delete/resync devices in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): A dictionary containing device-related information from the playbook.
+ Returns:
+ dict: A dictionary containing the extracted device parameters and other relevant information.
+ Description:
+ Retrieve all the device-related information from the playbook needed for adding, updating, deleting,
+ or resyncing devices in Cisco Catalyst Center.
+ """
+
+ want = {}
+ device_params = self.get_device_params(config)
+ want["device_params"] = device_params
+
+ self.want = want
+ self.msg = "Successfully collected all parameters from the playbook "
+ self.status = "success"
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ return self
+
+ def get_diff_merged(self, config):
+ """
+ Merge and process differences between existing devices and desired device configuration in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): A dictionary containing the desired device configuration and relevant information from the playbook.
+ Returns:
+ object: An instance of the class with updated results and status based on the processing of differences.
+ Description:
+ The function processes the differences and, depending on the changes required, it may add, update,
+ or resynchronize devices in Cisco Catalyst Center.
+ The updated results and status are stored in the class instance for further use.
+ """
+
+ devices_to_add = self.have["device_not_in_dnac"]
+ device_type = self.config[0].get("type", "NETWORK_DEVICE")
+ device_resynced = self.config[0].get("device_resync", False)
+ device_reboot = self.config[0].get("reboot_device", False)
+ credential_update = self.config[0].get("credential_update", False)
+
+ config['type'] = device_type
+ if device_type == "FIREPOWER_MANAGEMENT_SYSTEM":
+ config['http_port'] = self.config[0].get("http_port", "443")
+
+ config['ip_address_list'] = devices_to_add
+
+ if self.config[0].get('update_mgmt_ipaddresslist'):
+ device_ip = self.config[0].get('update_mgmt_ipaddresslist')[0].get('existMgmtIpAddress')
+ is_device_exists = self.is_device_exist_in_ccc(device_ip)
+
+ if not is_device_exists:
+ self.status = "failed"
+ self.msg = """Unable to update the Management IP address because the device with IP '{0}' is not
+ found in Cisco Catalyst Center.""".format(device_ip)
+ self.log(self.msg, "ERROR")
+ return self
+
+ if self.config[0].get('update_interface_details'):
+ device_to_update = self.get_device_ips_from_config_priority()
+ device_exist = self.is_device_exist_for_update(device_to_update)
+
+ if not device_exist:
+ self.msg = """Unable to update interface details because the device(s) listed: {0} are not present in the
+ Cisco Catalyst Center.""".format(str(device_to_update))
+ self.status = "failed"
+ self.result['response'] = self.msg
+ self.log(self.msg, "ERROR")
+ return self
+
+ if self.config[0].get('role'):
+ devices_to_update_role = self.get_device_ips_from_config_priority()
+ device_exist = self.is_device_exist_for_update(devices_to_update_role)
+
+ if not device_exist:
+ self.msg = """Unable to update device role because the device(s) listed: {0} are not present in the Cisco
+ Catalyst Center.""".format(str(devices_to_update_role))
+ self.status = "failed"
+ self.result['response'] = self.msg
+ self.log(self.msg, "ERROR")
+ return self
+
+ if credential_update:
+ device_to_update = self.get_device_ips_from_config_priority()
+ device_exist = self.is_device_exist_for_update(device_to_update)
+
+ if not device_exist:
+ self.msg = """Unable to edit device credentials/details because the device(s) listed: {0} are not present in the
+ Cisco Catalyst Center.""".format(str(device_to_update))
+ self.status = "failed"
+ self.result['response'] = self.msg
+ self.log(self.msg, "ERROR")
+ return self
+
+ if not config['ip_address_list']:
+ self.msg = "Devices '{0}' already present in Cisco Catalyst Center".format(self.have['devices_in_playbook'])
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ else:
+ # To add the devices in inventory
+ input_params = self.want.get("device_params")
+ device_params = input_params.copy()
+
+ if not device_params['snmpVersion']:
+ device_params['snmpVersion'] = "v3"
+
+ device_params['ipAddress'] = config['ip_address_list']
+ if device_params['snmpVersion'] == "v2":
+ params_to_remove = ["snmpAuthPassphrase", "snmpAuthProtocol", "snmpMode", "snmpPrivPassphrase", "snmpPrivProtocol", "snmpUserName"]
+ for param in params_to_remove:
+ device_params.pop(param, None)
+
+ if not device_params['snmpROCommunity']:
+ self.status = "failed"
+ self.msg = "Required parameter 'snmpROCommunity' for adding device with snmmp version v2 is not present"
+ self.result['msg'] = self.msg
+ self.log(self.msg, "ERROR")
+ return self
+ else:
+ if not device_params['snmpMode']:
+ device_params['snmpMode'] = "AUTHPRIV"
+
+ if not device_params['cliTransport']:
+ device_params['cliTransport'] = "ssh"
+
+ if not device_params['snmpPrivProtocol']:
+ device_params['snmpPrivProtocol'] = "AES128"
+
+ if device_params['snmpPrivProtocol'] == "AES192":
+ device_params['snmpPrivProtocol'] = "CISCOAES192"
+ elif device_params['snmpPrivProtocol'] == "AES256":
+ device_params['snmpPrivProtocol'] = "CISCOAES256"
+
+ if device_params['snmpMode'] == "NOAUTHNOPRIV":
+ device_params.pop('snmpAuthPassphrase', None)
+ device_params.pop('snmpPrivPassphrase', None)
+ device_params.pop('snmpPrivProtocol', None)
+ device_params.pop('snmpAuthProtocol', None)
+ elif device_params['snmpMode'] == "AUTHNOPRIV":
+ device_params.pop('snmpPrivPassphrase', None)
+ device_params.pop('snmpPrivProtocol', None)
+
+ self.mandatory_parameter().check_return_status()
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='add_device',
+ op_modifies=True,
+ params=device_params,
+ )
+ self.log("Received API response from 'add_device': {0}".format(str(response)), "DEBUG")
+
+ if response and isinstance(response, dict):
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+
+ if '/task/' in execution_details.get("progress"):
+ self.status = "success"
+ self.result['response'] = execution_details
+
+ if len(devices_to_add) > 0:
+ self.result['changed'] = True
+ self.msg = "Device(s) '{0}' added to Cisco Catalyst Center".format(str(devices_to_add))
+ self.log(self.msg, "INFO")
+ self.result['msg'] = self.msg
+ break
+ self.msg = "Device(s) '{0}' already present in Cisco Catalyst Center".format(str(self.config[0].get("ip_address_list")))
+ self.log(self.msg, "INFO")
+ self.result['msg'] = self.msg
+ break
+ elif execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "Device addition get failed because of {0}".format(failure_reason)
+ else:
+ self.msg = "Device addition get failed"
+ self.log(self.msg, "ERROR")
+ self.result['msg'] = self.msg
+ return self
+
+ except Exception as e:
+ error_message = "Error while adding device in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "ERROR")
+ raise Exception(error_message)
+
+ # Update the role of devices having the role source as Manual
+ if self.config[0].get('role'):
+ devices_to_update_role = self.get_device_ips_from_config_priority()
+ device_role = self.config[0].get('role')
+ role_update_count = 0
+ for device_ip in devices_to_update_role:
+ device_id = self.get_device_ids([device_ip])
+
+                # If the device already has the same role in Cisco Catalyst Center, no state change is needed
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params={"managementIpAddress": device_ip}
+ )
+ response = response.get('response')[0]
+
+ if response.get('role') == device_role:
+ self.status = "success"
+ self.result['changed'] = False
+ role_update_count += 1
+ log_msg = "The device role '{0}' is already set in Cisco Catalyst Center, no update is needed.".format(device_role)
+ self.log(log_msg, "INFO")
+ continue
+
+ device_role_params = {
+ 'role': device_role,
+ 'roleSource': "MANUAL",
+ 'id': device_id[0]
+ }
+
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='update_device_role',
+ op_modifies=True,
+ params=device_role_params,
+ )
+ self.log("Received API response from 'update_device_role': {0}".format(str(response)), "DEBUG")
+
+ if response and isinstance(response, dict):
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+ progress = execution_details.get("progress")
+
+ if 'successfully' in progress or 'succesfully' in progress:
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Device(s) '{0}' role updated successfully to '{1}'".format(str(devices_to_update_role), device_role)
+ self.result['response'] = self.msg
+ self.log(self.msg, "INFO")
+ break
+ elif execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "Device role updation get failed because of {0}".format(failure_reason)
+ else:
+ self.msg = "Device role updation get failed"
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ break
+
+ except Exception as e:
+ error_message = "Error while updating device role '{0}' in Cisco Catalyst Center: {1}".format(device_role, str(e))
+ self.log(error_message, "ERROR")
+
+ if role_update_count == len(devices_to_update_role):
+ self.status = "success"
+ self.result['changed'] = False
+ self.msg = """The device role '{0}' is already set in Cisco Catalyst Center, no device role update is needed for the
+ devices {1}.""".format(device_role, str(devices_to_update_role))
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+
+ if credential_update:
+ device_to_update = self.get_device_ips_from_config_priority()
+            # Update device details and credentials
+ device_uuids = self.get_device_ids(device_to_update)
+ password = "Testing@123"
+ export_payload = {"deviceUuids": device_uuids, "password": password, "operationEnum": "0"}
+ export_response = self.trigger_export_api(export_payload)
+ self.check_return_status()
+ csv_reader = self.decrypt_and_read_csv(export_response, password)
+ self.check_return_status()
+ device_details = {}
+
+ for row in csv_reader:
+ ip_address = row['ip_address']
+ device_details[ip_address] = row
+
+ for device_ip in device_to_update:
+ playbook_params = self.want.get("device_params").copy()
+ playbook_params['ipAddress'] = [device_ip]
+ device_data = device_details[device_ip]
+ if device_data['snmpv3_privacy_password'] == ' ':
+ device_data['snmpv3_privacy_password'] = None
+ if device_data['snmpv3_auth_password'] == ' ':
+ device_data['snmpv3_auth_password'] = None
+
+ if not playbook_params['snmpMode']:
+ if device_data['snmpv3_privacy_password']:
+ playbook_params['snmpMode'] = "AUTHPRIV"
+ elif device_data['snmpv3_auth_password']:
+ playbook_params['snmpMode'] = "AUTHNOPRIV"
+ else:
+ playbook_params['snmpMode'] = "NOAUTHNOPRIV"
+
+ if not playbook_params['cliTransport']:
+ if device_data['protocol'] == "ssh2":
+ playbook_params['cliTransport'] = "ssh"
+ else:
+ playbook_params['cliTransport'] = device_data['protocol']
+ if not playbook_params['snmpPrivProtocol']:
+ playbook_params['snmpPrivProtocol'] = device_data['snmpv3_privacy_type']
+
+ csv_data_dict = {
+ 'username': device_data['cli_username'],
+ 'password': device_data['cli_password'],
+ 'enable_password': device_data['cli_enable_password'],
+ 'netconf_port': device_data['netconf_port'],
+ }
+
+ if device_data['snmp_version'] == '3':
+ csv_data_dict['snmp_username'] = device_data['snmpv3_user_name']
+ if device_data['snmpv3_privacy_password']:
+ csv_data_dict['snmp_auth_passphrase'] = device_data['snmpv3_auth_password']
+ csv_data_dict['snmp_priv_passphrase'] = device_data['snmpv3_privacy_password']
+ else:
+ csv_data_dict['snmp_username'] = None
+
+ device_key_mapping = {
+ 'username': 'userName',
+ 'password': 'password',
+ 'enable_password': 'enablePassword',
+ 'snmp_username': 'snmpUserName',
+ 'netconf_port': 'netconfPort'
+ }
+ device_update_key_list = ["username", "password", "enable_password", "snmp_username", "netconf_port"]
+
+ for key in device_update_key_list:
+ mapped_key = device_key_mapping[key]
+
+ if playbook_params[mapped_key] is None:
+ playbook_params[mapped_key] = csv_data_dict[key]
+
+ if playbook_params['snmpMode'] == "AUTHPRIV":
+ if not playbook_params['snmpAuthPassphrase']:
+ playbook_params['snmpAuthPassphrase'] = csv_data_dict['snmp_auth_passphrase']
+ if not playbook_params['snmpPrivPassphrase']:
+ playbook_params['snmpPrivPassphrase'] = csv_data_dict['snmp_priv_passphrase']
+
+ if playbook_params['snmpPrivProtocol'] == "AES192":
+ playbook_params['snmpPrivProtocol'] = "CISCOAES192"
+ elif playbook_params['snmpPrivProtocol'] == "AES256":
+ playbook_params['snmpPrivProtocol'] = "CISCOAES256"
+
+ if playbook_params['snmpMode'] == "NOAUTHNOPRIV":
+ playbook_params.pop('snmpAuthPassphrase', None)
+ playbook_params.pop('snmpPrivPassphrase', None)
+ playbook_params.pop('snmpPrivProtocol', None)
+ playbook_params.pop('snmpAuthProtocol', None)
+ elif playbook_params['snmpMode'] == "AUTHNOPRIV":
+ playbook_params.pop('snmpPrivPassphrase', None)
+ playbook_params.pop('snmpPrivProtocol', None)
+
+ if playbook_params['netconfPort'] == " ":
+ playbook_params['netconfPort'] = None
+
+ if playbook_params['enablePassword'] == " ":
+ playbook_params['enablePassword'] = None
+
+ if playbook_params['netconfPort'] and playbook_params['cliTransport'] == "telnet":
+ self.log("""Updating the device cli transport from ssh to telnet with netconf port '{0}' so make
+ netconf port as None to perform the device update task""".format(playbook_params['netconfPort']), "DEBUG")
+ playbook_params['netconfPort'] = None
+
+ if not playbook_params['snmpVersion']:
+ if device_data['snmp_version'] == '3':
+ playbook_params['snmpVersion'] = "v3"
+ else:
+ playbook_params['snmpVersion'] = "v2"
+
+ if playbook_params['snmpVersion'] == 'v2':
+ params_to_remove = ["snmpAuthPassphrase", "snmpAuthProtocol", "snmpMode", "snmpPrivPassphrase", "snmpPrivProtocol", "snmpUserName"]
+ for param in params_to_remove:
+ playbook_params.pop(param, None)
+
+ if not playbook_params['snmpROCommunity']:
+ playbook_params['snmpROCommunity'] = device_data.get('snmp_community', None)
+
+ try:
+ if playbook_params['updateMgmtIPaddressList']:
+ new_mgmt_ipaddress = playbook_params['updateMgmtIPaddressList'][0]['newMgmtIpAddress']
+ if new_mgmt_ipaddress in self.have['device_in_dnac']:
+ self.status = "failed"
+ self.msg = "Device with IP address '{0}' already exists in inventory".format(new_mgmt_ipaddress)
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ else:
+ self.log("Playbook parameter for updating device new management ip address: {0}".format(str(playbook_params)), "DEBUG")
+ response = self.dnac._exec(
+ family="devices",
+ function='sync_devices',
+ op_modifies=True,
+ params=playbook_params,
+ )
+ self.log("Received API response from 'sync_devices': {0}".format(str(response)), "DEBUG")
+
+ if response and isinstance(response, dict):
+ self.check_managementip_execution_response(response, device_ip, new_mgmt_ipaddress)
+ self.check_return_status()
+
+ else:
+ self.log("Playbook parameter for updating devices: {0}".format(str(playbook_params)), "DEBUG")
+ response = self.dnac._exec(
+ family="devices",
+ function='sync_devices',
+ op_modifies=True,
+ params=playbook_params,
+ )
+ self.log("Received API response from 'sync_devices': {0}".format(str(response)), "DEBUG")
+
+ if response and isinstance(response, dict):
+ self.check_device_update_execution_response(response, device_ip)
+ self.check_return_status()
+
+ except Exception as e:
+ error_message = "Error while updating device in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "ERROR")
+ raise Exception(error_message)
+
+ # Update list of interface details on specific or list of devices.
+ if self.config[0].get('update_interface_details'):
+ device_to_update = self.get_device_ips_from_config_priority()
+ self.update_interface_detail_of_device(device_to_update).check_return_status()
+
+        # If the User Defined Field (UDF) is not present, create it and add multiple UDFs to a specific device or list of devices
+ if self.config[0].get('add_user_defined_field'):
+ udf_field_list = self.config[0].get('add_user_defined_field')
+
+ for udf in udf_field_list:
+ field_name = udf.get('name')
+
+ if field_name is None:
+ self.status = "failed"
+ self.msg = "Error: The mandatory parameter 'name' for the User Defined Field is missing. Please provide the required information."
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ return self
+
+                # Check whether the Global User Defined Field exists; if not, create it with the given field name
+ udf_exist = self.is_udf_exist(field_name)
+
+ if not udf_exist:
+ # Create the Global UDF
+ self.log("Global User Defined Field '{0}' does not present in Cisco Catalyst Center, we need to create it".format(field_name), "DEBUG")
+ self.create_user_defined_field(udf).check_return_status()
+
+ # Get device Id based on config priority
+ device_ips = self.get_device_ips_from_config_priority()
+ device_ids = self.get_device_ids(device_ips)
+
+ if len(device_ids) == 0:
+ self.status = "failed"
+ self.msg = """Unable to assign Global User Defined Field: No devices found in Cisco Catalyst Center.
+ Please add devices to proceed."""
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ return self
+
+            # Add the Global UDF to the devices using their IDs
+ self.add_field_to_devices(device_ids, udf).check_return_status()
+
+ self.result['changed'] = True
+ self.msg = "Global User Defined Field(UDF) named '{0}' has been successfully added to the device.".format(field_name)
+ self.log(self.msg, "INFO")
+
+        # Once the wired device is added, we will assign it to a site and provision it
+ if self.config[0].get('provision_wired_device'):
+ self.provisioned_wired_device().check_return_status()
+
+        # Once the wireless device is added, we will assign it to a site and provision it
+        # Defer this feature as there is an API issue; once it is fixed, we will address it in an upcoming release (iac2.0)
+ if support_for_provisioning_wireless:
+ if self.config[0].get('provision_wireless_device'):
+ self.provisioned_wireless_devices().check_return_status()
+
+ if device_resynced:
+ self.resync_devices().check_return_status()
+
+ if device_reboot:
+ self.reboot_access_points().check_return_status()
+
+ if self.config[0].get('export_device_list'):
+ self.export_device_details().check_return_status()
+
+ return self
+
+    def get_diff_deleted(self, config):
+        """
+        Delete devices in Cisco Catalyst Center based on device IP Address.
+        Parameters:
+            self (object): An instance of a class used for interacting with Cisco Catalyst Center
+            config (dict): A dictionary containing the list of device IP addresses to be deleted.
+        Returns:
+            object: An instance of the class with updated results and status based on the deletion operation.
+        Description:
+            This function is responsible for removing devices from the Cisco Catalyst Center inventory.
+            Wired devices that are provisioned are first un-provisioned (removed from the Inventory page)
+            before deletion, and Global User Defined Fields (UDFs) associated with the devices can also
+            be deleted.
+        """
+
+        device_to_delete = self.get_device_ips_from_config_priority()
+        self.result['msg'] = []
+
+        # When 'add_user_defined_field' is present, this run deletes only the listed Global UDFs
+        # and returns early -- the device deletion loop below is intentionally skipped in that case.
+        if self.config[0].get('add_user_defined_field'):
+            udf_field_list = self.config[0].get('add_user_defined_field')
+            for udf in udf_field_list:
+                field_name = udf.get('name')
+                udf_id = self.get_udf_id(field_name)
+
+                # A missing id means the UDF is already absent; treat it as a no-op success.
+                # NOTE(review): this returns on the FIRST absent UDF, so any remaining UDFs in the
+                # list are not processed -- confirm this is intended.
+                if udf_id is None:
+                    self.status = "success"
+                    self.msg = "Global UDF '{0}' is not present in Cisco Catalyst Center".format(field_name)
+                    self.log(self.msg, "INFO")
+                    self.result['changed'] = False
+                    self.result['msg'] = self.msg
+                    self.result['response'] = self.msg
+                    return self
+
+                try:
+                    response = self.dnac._exec(
+                        family="devices",
+                        function='delete_user_defined_field',
+                        params={"id": udf_id},
+                    )
+                    if response and isinstance(response, dict):
+                        self.log("Received API response from 'delete_user_defined_field': {0}".format(str(response)), "DEBUG")
+                        task_id = response.get('response').get('taskId')
+
+                        # Poll the asynchronous task until it reports success or an error.
+                        while True:
+                            execution_details = self.get_task_details(task_id)
+
+                            if 'success' in execution_details.get("progress"):
+                                self.status = "success"
+                                self.msg = "Global UDF '{0}' deleted successfully from Cisco Catalyst Center".format(field_name)
+                                self.log(self.msg, "INFO")
+                                self.result['changed'] = True
+                                self.result['response'] = execution_details
+                                break
+                            elif execution_details.get("isError"):
+                                self.status = "failed"
+                                failure_reason = execution_details.get("failureReason")
+                                if failure_reason:
+                                    self.msg = "Failed to delete Global User Defined Field(UDF) due to: {0}".format(failure_reason)
+                                else:
+                                    self.msg = "Global UDF deletion get failed."
+                                self.log(self.msg, "ERROR")
+                                break
+
+                except Exception as e:
+                    error_message = "Error while deleting Global UDF from Cisco Catalyst Center: {0}".format(str(e))
+                    self.log(error_message, "ERROR")
+                    raise Exception(error_message)
+
+            return self
+
+        for device_ip in device_to_delete:
+            # Devices not present in the inventory are reported and skipped.
+            if device_ip not in self.have.get("device_in_dnac"):
+                self.status = "success"
+                self.result['changed'] = False
+                self.msg = "Device '{0}' is not present in Cisco Catalyst Center so can't perform delete operation".format(device_ip)
+                self.result['msg'].append(self.msg)
+                self.result['response'] = self.msg
+                self.log(self.msg, "INFO")
+                continue
+
+            try:
+                provision_params = {
+                    "device_management_ip_address": device_ip
+                }
+                # NOTE(review): 'prov_respone' is a typo for 'prov_response' (local variable, kept as-is).
+                prov_respone = self.dnac._exec(
+                    family="sda",
+                    function='get_provisioned_wired_device',
+                    params=provision_params,
+                )
+
+                # A provisioned wired device must be un-provisioned (removed from SDA) before deletion.
+                if prov_respone.get("status") == "success":
+                    response = self.dnac._exec(
+                        family="sda",
+                        function='delete_provisioned_wired_device',
+                        params=provision_params,
+                    )
+                    executionid = response.get("executionId")
+
+                    # Poll the business API execution until it succeeds or reports an error.
+                    while True:
+                        execution_details = self.get_execution_details(executionid)
+                        if execution_details.get("status") == "SUCCESS":
+                            self.result['changed'] = True
+                            self.msg = execution_details.get("bapiName")
+                            self.log(self.msg, "INFO")
+                            self.result['response'].append(self.msg)
+                            break
+                        elif execution_details.get("bapiError"):
+                            self.msg = execution_details.get("bapiError")
+                            self.log(self.msg, "ERROR")
+                            self.result['response'].append(self.msg)
+                            break
+            except Exception as e:
+                # 'get_provisioned_wired_device' raises for devices that are not provisioned, so a
+                # plain inventory delete is performed here as the fallback path.
+                # NOTE(review): any other failure in the try block also lands here -- confirm intended.
+                device_id = self.get_device_ids([device_ip])
+                delete_params = {
+                    "id": device_id[0],
+                    "clean_config": self.config[0].get("clean_config", False)
+                }
+                response = self.dnac._exec(
+                    family="devices",
+                    function='delete_device_by_id',
+                    params=delete_params,
+                )
+
+                if response and isinstance(response, dict):
+                    task_id = response.get('response').get('taskId')
+
+                    # Poll the deletion task until it reports success or an error.
+                    while True:
+                        execution_details = self.get_task_details(task_id)
+
+                        if 'success' in execution_details.get("progress"):
+                            self.status = "success"
+                            self.msg = "Device '{0}' was successfully deleted from Cisco Catalyst Center".format(device_ip)
+                            self.log(self.msg, "INFO")
+                            self.result['changed'] = True
+                            self.result['response'] = execution_details
+                            break
+                        elif execution_details.get("isError"):
+                            self.status = "failed"
+                            failure_reason = execution_details.get("failureReason")
+                            if failure_reason:
+                                self.msg = "Device '{0}' deletion get failed due to: {1}".format(device_ip, failure_reason)
+                            else:
+                                self.msg = "Device '{0}' deletion get failed.".format(device_ip)
+                            self.log(self.msg, "ERROR")
+                            break
+                    self.result['msg'].append(self.msg)
+
+        return self
+
+ def verify_diff_merged(self, config):
+ """
+ Verify the merged status(Addition/Updation) of Devices in Cisco Catalyst Center.
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the merged status of a configuration in Cisco Catalyst Center by retrieving the current state
+ (have) and desired state (want) of the configuration, logs the states, and validates whether the specified
+ site exists in the Catalyst Center configuration.
+
+ The function performs the following verifications:
+ - Checks for devices added to Cisco Catalyst Center and logs the status.
+ - Verifies updated device roles and logs the status.
+ - Verifies updated interface details and logs the status.
+ - Verifies updated device credentials and logs the status.
+ - Verifies the creation of a global User Defined Field (UDF) and logs the status.
+ - Verifies the provisioning of wired devices and logs the status.
+ """
+
+ self.get_have(config)
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ devices_to_add = self.have["device_not_in_dnac"]
+ credential_update = self.config[0].get("credential_update", False)
+ device_type = self.config[0].get("type", "NETWORK_DEVICE")
+ device_ips = self.get_device_ips_from_config_priority()
+
+ if not devices_to_add:
+ self.status = "success"
+ msg = """Requested device(s) '{0}' have been successfully added to the Cisco Catalyst Center and their
+ addition has been verified.""".format(str(self.have['devices_in_playbook']))
+ self.log(msg, "INFO")
+ else:
+ self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that the device addition
+ task may not have executed successfully.""", "INFO")
+
+ if self.config[0].get('update_interface_details'):
+ interface_update_flag = True
+ interface_names_list = self.config[0].get('update_interface_details').get('interface_name')
+
+ for device_ip in device_ips:
+ for interface_name in interface_names_list:
+ if not self.check_interface_details(device_ip, interface_name):
+ interface_update_flag = False
+ break
+
+ if interface_update_flag:
+ self.status = "success"
+ msg = "Interface details updated and verified successfully for devices {0}.".format(device_ips)
+ self.log(msg, "INFO")
+ else:
+ self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that the update
+ interface details task may not have executed successfully.""", "INFO")
+
+ if credential_update and device_type == "NETWORK_DEVICE":
+ credential_update_flag = self.check_credential_update()
+
+ if credential_update_flag:
+ self.status = "success"
+ msg = "Device credentials and details updated and verified successfully in Cisco Catalyst Center."
+ self.log(msg, "INFO")
+ else:
+ self.log("Playbook parameter does not match with Cisco Catalyst Center, meaning device updation task not executed properly.", "INFO")
+ elif device_type != "NETWORK_DEVICE":
+ self.log("""Unable to compare the parameter for device type '{0}' in the playbook with the one in Cisco Catalyst Center."""
+ .format(device_type), "WARNING")
+
+ if self.config[0].get('add_user_defined_field'):
+ udf_field_list = self.config[0].get('add_user_defined_field')
+ for udf in udf_field_list:
+ field_name = udf.get('name')
+ udf_exist = self.is_udf_exist(field_name)
+
+ if udf_exist:
+ self.status = "success"
+ msg = "Global UDF {0} created and verified successfully".format(field_name)
+ self.log(msg, "INFO")
+ else:
+ self.log("""Mismatch between playbook parameter and Cisco Catalyst Center detected, indicating that
+ the task of creating Global UDF may not have executed successfully.""", "INFO")
+
+ if self.config[0].get('role'):
+ device_role_flag = True
+
+ for device_ip in device_ips:
+ if not self.check_device_role(device_ip):
+ device_role_flag = False
+ break
+
+ if device_role_flag:
+ self.status = "success"
+ msg = "Device roles updated and verified successfully."
+ self.log(msg, "INFO")
+ else:
+ self.log("""Mismatch between playbook parameter 'role' and Cisco Catalyst Center detected, indicating the
+ device role update task may not have executed successfully.""", "INFO")
+
+ if self.config[0].get('provision_wired_device'):
+ provision_wired_list = self.config[0].get('provision_wired_device')
+ provision_wired_flag = True
+ provision_device_list = []
+
+ for prov_dict in provision_wired_list:
+ device_ip = prov_dict['device_ip']
+ provision_device_list.append(device_ip)
+ if not self.get_provision_wired_device(device_ip):
+ provision_wired_flag = False
+ break
+
+ if provision_wired_flag:
+ self.status = "success"
+ msg = "Wired devices {0} get provisioned and verified successfully.".format(provision_device_list)
+ self.log(msg, "INFO")
+ else:
+ self.log("""Mismatch between playbook's input and Cisco Catalyst Center detected, indicating that
+ the provisioning task may not have executed successfully.""", "INFO")
+
+ return self
+
+ def verify_diff_deleted(self, config):
+ """
+ Verify the deletion status of Device and Global UDF in Cisco Catalyst Center.
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the deletion status of a configuration in Cisco Catalyst Center.
+ It validates whether the specified Devices or Global UDF deleted from Cisco Catalyst Center.
+ """
+
+ self.get_have(config)
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+ input_devices = self.have["want_device"]
+ device_in_dnac = self.device_exists_in_dnac()
+
+ if self.config[0].get('add_user_defined_field'):
+ udf_field_list = self.config[0].get('add_user_defined_field')
+ for udf in udf_field_list:
+ field_name = udf.get('name')
+ udf_id = self.get_udf_id(field_name)
+
+ if udf_id is None:
+ self.status = "success"
+ msg = """Global UDF named '{0}' has been successfully deleted from Cisco Catalyst Center and the deletion
+ has been verified.""".format(field_name)
+ self.log(msg, "INFO")
+
+ return self
+
+ device_delete_flag = True
+ for device_ip in input_devices:
+ if device_ip in device_in_dnac:
+ device_after_deletion = device_ip
+ device_delete_flag = False
+ break
+
+ if device_delete_flag:
+ self.status = "success"
+ self.msg = "Requested device(s) '{0}' deleted from Cisco Catalyst Center and the deletion has been verified.".format(str(input_devices))
+ self.log(self.msg, "INFO")
+ else:
+ self.log("""Mismatch between playbook parameter device({0}) and Cisco Catalyst Center detected, indicating that
+ the device deletion task may not have executed successfully.""".format(device_after_deletion), "INFO")
+
+ return self
+
+
+def main():
+ """ main entry point for module execution
+ """
+
+ element_spec = {'dnac_host': {'type': 'str', 'required': True, },
+ 'dnac_port': {'type': 'str', 'default': '443'},
+ 'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
+ 'dnac_password': {'type': 'str', 'no_log': True},
+ 'dnac_verify': {'type': 'bool', 'default': 'True'},
+ 'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
+ 'dnac_debug': {'type': 'bool', 'default': False},
+ 'dnac_log_level': {'type': 'str', 'default': 'WARNING'},
+ "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
+ "dnac_log_append": {"type": 'bool', "default": True},
+ 'dnac_log': {'type': 'bool', 'default': False},
+ 'validate_response_schema': {'type': 'bool', 'default': True},
+ 'config_verify': {'type': 'bool', "default": False},
+ 'dnac_api_task_timeout': {'type': 'int', "default": 1200},
+ 'dnac_task_poll_interval': {'type': 'int', "default": 2},
+ 'config': {'required': True, 'type': 'list', 'elements': 'dict'},
+ 'state': {'default': 'merged', 'choices': ['merged', 'deleted']}
+ }
+
+ module = AnsibleModule(argument_spec=element_spec,
+ supports_check_mode=False)
+
+ dnac_device = DnacDevice(module)
+ state = dnac_device.params.get("state")
+
+ if state not in dnac_device.supported_states:
+ dnac_device.status = "invalid"
+ dnac_device.msg = "State {0} is invalid".format(state)
+ dnac_device.check_return_status()
+
+ dnac_device.validate_input().check_return_status()
+ config_verify = dnac_device.params.get("config_verify")
+
+ for config in dnac_device.validated_config:
+ dnac_device.reset_values()
+ dnac_device.get_want(config).check_return_status()
+ dnac_device.get_have(config).check_return_status()
+ dnac_device.get_diff_state_apply[state](config).check_return_status()
+ if config_verify:
+ dnac_device.verify_diff_state_apply[state](config).check_return_status()
+
+ module.exit_json(**dnac_device.result)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/cisco/dnac/plugins/modules/inventory_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/inventory_workflow_manager.py
new file mode 100644
index 000000000..3eda0e2cc
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/inventory_workflow_manager.py
@@ -0,0 +1,3638 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2024, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ("Madhan Sankaranarayanan, Abhishek Maheshwari")
+
+DOCUMENTATION = r"""
+---
+module: inventory_workflow_manager
+short_description: Resource module for Network Device
+description:
+- Manage operations create, update and delete of the resource Network Device.
+- Adds the device with given credential.
+- Deletes the network device for the given Id.
+- Sync the devices provided as input.
+version_added: '6.8.0'
+extends_documentation_fragment:
+ - cisco.dnac.workflow_manager_params
+author: Abhishek Maheshwari (@abmahesh)
+ Madhan Sankaranarayanan (@madhansansel)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Cisco Catalyst Center after module completion.
+ type: str
+ choices: [ merged, deleted ]
+ default: merged
+ config:
+    description: List of devices with credentials to perform Add/Update/Delete/Resync operation
+ type: list
+ elements: dict
+ required: True
+ suboptions:
+ type:
+ description: Select Device's type from NETWORK_DEVICE, COMPUTE_DEVICE, MERAKI_DASHBOARD, THIRD_PARTY_DEVICE, FIREPOWER_MANAGEMENT_SYSTEM.
+ NETWORK_DEVICE - This refers to traditional networking equipment such as routers, switches, access points, and firewalls. These devices
+ are responsible for routing, switching, and providing connectivity within the network.
+ COMPUTE_DEVICE - These are computing resources such as servers, virtual machines, or containers that are part of the network infrastructure.
+ Cisco Catalyst Center can integrate with compute devices to provide visibility and management capabilities, ensuring that the network and
+ compute resources work together seamlessly to support applications and services.
+ MERAKI_DASHBOARD - It is cloud-based platform used to manage Meraki networking devices, including wireless access points, switches, security
+ appliances, and cameras.
+ THIRD_PARTY_DEVICE - This category encompasses devices from vendors other than Cisco or Meraki. Cisco Catalyst Center is designed to support
+ integration with third-party devices through open standards and APIs. This allows organizations to manage heterogeneous network
+ environments efficiently using Cisco Catalyst Center's centralized management and automation capabilities.
+ FIREPOWER_MANAGEMENT_SYSTEM - It is a centralized management console used to manage Cisco's Firepower Next-Generation Firewall (NGFW) devices.
+ It provides features such as policy management, threat detection, and advanced security analytics.
+ type: str
+ default: "NETWORK_DEVICE"
+ cli_transport:
+ description: The essential prerequisite for adding Network devices is the specification of the transport
+ protocol (either SSH or Telnet) used by the device.
+ type: str
+ compute_device:
+ description: Indicates whether a device is a compute device.
+ type: bool
+ password:
+ description: Password for accessing the device and for file encryption during device export. Required for
+ adding Network Device. Also needed for file encryption while exporting device in a csv file.
+ type: str
+ enable_password:
+ description: Password required for enabling configurations on the device.
+ type: str
+ extended_discovery_info:
+ description: Additional discovery information for the device.
+ type: str
+ http_password:
+ description: HTTP password required for adding compute, Meraki, and Firepower Management Devices.
+ type: str
+ http_port:
+ description: HTTP port number required for adding compute and Firepower Management Devices.
+ type: str
+ http_secure:
+ description: Flag indicating HTTP security.
+ type: bool
+ http_username:
+ description: HTTP username required for adding compute and Firepower Management Devices.
+ type: str
+ ip_address_list:
+ description: A list of the IP addresses for the devices. It is required for tasks such as adding, updating, deleting,
+ or resyncing devices, with Meraki devices being the exception.
+ elements: str
+ type: list
+ hostname_list:
+ description: "A list of hostnames representing devices. Operations such as updating, deleting, resyncing, or rebooting
+ can be performed as alternatives to using IP addresses."
+ type: list
+ elements: str
+ serial_number_list:
+ description: A list of serial numbers representing devices. Operations such as updating, deleting, resyncing, or rebooting
+ can be performed as alternatives to using IP addresses.
+ type: list
+ elements: str
+ mac_address_list:
+ description: "A list of MAC addresses representing devices. Operations such as updating, deleting, resyncing, or rebooting
+ can be performed as alternatives to using IP addresses."
+ type: list
+ elements: str
+ netconf_port:
+ description: Specifies the port number for connecting to devices using the Netconf protocol. Netconf (Network Configuration Protocol)
+ is used for managing network devices. Ensure that the provided port number corresponds to the Netconf service port configured
+ on your network devices.
+ NETCONF with user privilege 15 is mandatory for enabling Wireless Services on Wireless capable devices such as Catalyst 9000 series
+ Switches and C9800 Series Wireless Controllers. The NETCONF credentials are required to connect to C9800 Series Wireless Controllers
+ as the majority of data collection is done using NETCONF for these Devices.
+ type: str
+ username:
+ description: Username for accessing the device. Required for Adding Network Device.
+ type: str
+ snmp_auth_passphrase:
+ description: SNMP authentication passphrase required for adding network, compute, and third-party devices.
+ type: str
+ snmp_auth_protocol:
+ description: SNMP authentication protocol.
+ SHA (Secure Hash Algorithm) - cryptographic hash function commonly used for data integrity verification and authentication purposes.
+ type: str
+ default: "SHA"
+ snmp_mode:
+ description: Device's snmp Mode refer to different SNMP (Simple Network Management Protocol) versions and their corresponding security levels.
+ NOAUTHNOPRIV - This mode provides no authentication or encryption for SNMP messages. It means that devices communicating using SNMPv1 do
+ not require any authentication (username/password) or encryption (data confidentiality). This makes it the least secure option.
+ AUTHNOPRIV - This mode provides authentication but no encryption for SNMP messages. Authentication involves validating the source of the
+ SNMP messages using a community string (similar to a password). However, the data transmitted between devices is not encrypted,
+ so it's susceptible to eavesdropping.
+ AUTHPRIV - This mode provides both authentication and encryption for SNMP messages. It offers the highest level of security among the three
+ options. Authentication ensures that the source of the messages is genuine, and encryption ensures that the data exchanged between
+ devices is confidential and cannot be intercepted by unauthorized parties.
+ type: str
+ snmp_priv_passphrase:
+ description: SNMP private passphrase required for adding network, compute, and third-party devices.
+ type: str
+ snmp_priv_protocol:
+ description: SNMP private protocol required for adding network, compute, and third-party devices.
+ type: str
+ snmp_ro_community:
+ description: SNMP Read-Only community required for adding V2C devices.
+ type: str
+ snmp_rw_community:
+ description: SNMP Read-Write community required for adding V2C devices.
+ type: str
+ snmp_retry:
+ description: SNMP retry count.
+ type: int
+ default: 3
+ snmp_timeout:
+ description: SNMP timeout duration.
+ type: int
+ default: 5
+ snmp_username:
+ description: SNMP username required for adding network, compute, and third-party devices.
+ type: str
+ snmp_version:
+ description: It is a standard protocol used for managing and monitoring network devices.
+ v2 - In this communication between the SNMP manager (such as Cisco Catalyst) and the managed devices
+ (such as routers, switches, or access points) is based on community strings.Community strings serve
+ as form of authentication and they are transmitted in clear text, providing no encryption.
+ v3 - It is the most secure version of SNMP, providing authentication, integrity, and encryption features.
+ It allows for the use of usernames, authentication passwords, and encryption keys, providing stronger
+ security compared to v2.
+ type: str
+ update_mgmt_ipaddresslist:
+ description: List of updated management IP addresses for network devices.
+ type: list
+ elements: dict
+ suboptions:
+ exist_mgmt_ipaddress:
+ description: Device's existing Mgmt IpAddress.
+ type: str
+ new_mgmt_ipaddress:
+ description: Device's new Mgmt IpAddress.
+ type: str
+ force_sync:
+ description: If forcesync is true then device sync would run in high priority thread if available, else the sync will fail.
+ type: bool
+ default: False
+ device_resync:
+ description: Make this as true needed for the resyncing of device.
+ type: bool
+ default: False
+ reboot_device:
+ description: Make this as true needed for the Rebooting of Access Points.
+ type: bool
+ default: False
+ credential_update:
+ description: Make this as true needed for the updation of device credentials and other device details.
+ type: bool
+ default: False
+ clean_config:
+ description: Required if need to delete the Provisioned device by clearing current configuration.
+ type: bool
+ default: False
+ role:
+ description: Role of device which can be ACCESS, CORE, DISTRIBUTION, BORDER ROUTER, UNKNOWN.
+ ALL - This role typically represents all devices within the network, regardless of their specific roles or functions.
+        UNKNOWN - This role is assigned to devices whose roles or functions have not been identified or classified within Cisco Catalyst Center.
+ This could happen if the platform is unable to determine the device's role based on available information.
+ ACCESS - This role typically represents switches or access points that serve as access points for end-user devices to connect to the network.
+ These devices are often located at the edge of the network and provide connectivity to end-user devices.
+ BORDER ROUTER - These are devices that connect different network domains or segments together. They often serve as
+ gateways between different networks, such as connecting an enterprise network to the internet or connecting
+ multiple branch offices.
+ DISTRIBUTION - This role represents function as distribution switches or routers in hierarchical network designs. They aggregate traffic
+ from access switches and route it toward the core of the network or toward other distribution switches.
+ CORE - This role typically represents high-capacity switches or routers that form the backbone of the network. They handle large volumes
+ of traffic and provide connectivity between different parts of network, such as connecting distribution switches or
+ providing interconnection between different network segments.
+ type: str
+ add_user_defined_field:
+ description: This operation will take dictionary as a parameter and in this we give details to
+ create/update/delete/assign multiple UDF to a device.
+ type: dict
+ suboptions:
+ name:
+ description: Name of Global User Defined Field. Required for creating/deleting UDF and then assigning it to device.
+ type: str
+ description:
+ description: Info about the global user defined field. Also used while updating interface details.
+ type: str
+ value:
+ description: Value to assign to tag with or without the same user defined field name.
+ type: str
+ update_interface_details:
+ description: This operation will take dictionary as a parameter and in this we give details to update interface details of device.
+ type: dict
+ suboptions:
+ description:
+ description: Specifies the description of the interface of the device.
+ type: str
+ interface_name:
+ description: Specify the list of interface names to update the details of the device interface.
+ (For example, GigabitEthernet1/0/11, FortyGigabitEthernet1/1/2)
+ type: list
+ elements: str
+ vlan_id:
+ description: Unique Id number assigned to a VLAN within a network used only while updating interface details.
+ type: int
+ voice_vlan_id:
+ description: Identifier used to distinguish a specific VLAN that is dedicated to voice traffic used only while updating interface details.
+ type: int
+ deployment_mode:
+ description: Preview/Deploy [Preview means the configuration is not pushed to the device. Deploy makes the configuration pushed to the device]
+ type: str
+ default: "Deploy"
+ clear_mac_address_table:
+ description: Set this to true if you need to clear the MAC address table for a specific device's interface. It's a boolean type,
+ with a default value of False.
+ type: bool
+ default: False
+ admin_status:
+ description: Status of Interface of a device, it can be (UP/DOWN).
+ type: str
+ export_device_list:
+ description: This operation take dictionary as parameter and export the device details as well as device credentials
+ details in a csv file.
+ type: dict
+ suboptions:
+ password:
+          description: Specifies the password for the encryption of file while exporting the device credentials into the file.
+ type: str
+ site_name:
+ description: Indicates the exact location where the wired device will be provisioned. This is a string value that should
+ represent the complete hierarchical path of the site (For example, "Global/USA/San Francisco/BGL_18/floor_pnp").
+ type: str
+ operation_enum:
+ description: enum(CREDENTIALDETAILS, DEVICEDETAILS) 0 to export Device Credential Details Or 1 to export Device Details.
+        CREDENTIALDETAILS - Used for exporting device credentials details like snmp credentials, device credentials etc.
+ DEVICEDETAILS - Used for exporting device specific details like device hostname, serial number, type, family etc.
+ type: str
+ parameters:
+ description: List of device parameters that needs to be exported to file.(For example, ["componentName", "SerialNumber", "Last Sync Status"])
+ type: list
+ elements: str
+ provision_wired_device:
+ description: This parameter takes a list of dictionaries. Each dictionary provides the IP address of a wired device and
+ the name of the site where the device will be provisioned.
+ type: list
+ elements: dict
+ suboptions:
+ device_ip:
+ description: Specifies the IP address of the wired device. This is a string value that should be in the format of
+ standard IPv4 or IPv6 addresses.
+ type: str
+ version_added: 6.12.0
+ site_name:
+ description: Indicates the exact location where the wired device will be provisioned. This is a string value that should
+ represent the complete hierarchical path of the site (For example, "Global/USA/San Francisco/BGL_18/floor_pnp").
+ type: str
+ resync_retry_count:
+ description: Determines the total number of retry attempts for checking if the device has reached a managed state during
+ the provisioning process. If unspecified, the default value is set to 200 retries.
+ type: int
+ default: 200
+ version_added: 6.12.0
+ resync_retry_interval:
+ description: Sets the interval, in seconds, at which the system will recheck the device status throughout the provisioning
+ process. If unspecified, the system will check the device status every 2 seconds by default.
+ type: int
+ default: 2
+ version_added: 6.12.0
+
+requirements:
+- dnacentersdk >= 2.5.5
+- python >= 3.5
+seealso:
+- name: Cisco Catalyst Center documentation for Devices AddDevice2
+ description: Complete reference of the AddDevice2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-device
+- name: Cisco Catalyst Center documentation for Devices DeleteDeviceById
+ description: Complete reference of the DeleteDeviceById API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-device-by-id
+- name: Cisco Catalyst Center documentation for Devices SyncDevices2
+ description: Complete reference of the SyncDevices2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!sync-devices
+notes:
+ - SDK Method used are
+ devices.Devices.add_device,
+ devices.Devices.delete_device_by_id,
+ devices.Devices.sync_devices,
+
+ - Paths used are
+ post /dna/intent/api/v1/network-device,
+ delete /dna/intent/api/v1/network-device/{id},
+ put /dna/intent/api/v1/network-device,
+
+ - Removed 'managementIpAddress' options in v4.3.0.
+ - Renamed argument 'ip_address' to 'ip_address_list' option in v6.12.0.
+ - Removed 'serial_number', 'device_added', 'role_source', options in v6.12.0.
+ - Added 'add_user_defined_field', 'update_interface_details', 'export_device_list' options in v6.13.1.
+ - Removed 'provision_wireless_device', 'reprovision_wired_device' options in v6.13.1.
+ - Added the parameter 'admin_status' options in v6.13.1.
+ - Removed 'device_updated' options in v6.13.1.
+
+"""
+
+EXAMPLES = r"""
+- name: Add new device in Inventory with full credentials
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - cli_transport: ssh
+ compute_device: False
+ password: Test@123
+ enable_password: Test@1234
+ extended_discovery_info: test
+ http_username: "testuser"
+ http_password: "test"
+ http_port: "443"
+ http_secure: False
+ ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ netconf_port: 830
+ snmp_auth_passphrase: "Lablab@12"
+ snmp_auth_protocol: SHA
+ snmp_mode: AUTHPRIV
+ snmp_priv_passphrase: "Lablab@123"
+ snmp_priv_protocol: AES256
+ snmp_retry: 3
+ snmp_timeout: 5
+ snmp_username: v3Public
+ snmp_version: v3
+ type: NETWORK_DEVICE
+ username: cisco
+
+- name: Add new Compute device in Inventory with full credentials.Inputs needed for Compute Device
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ http_username: "testuser"
+ http_password: "test"
+ http_port: "443"
+ snmp_auth_passphrase: "Lablab@12"
+ snmp_auth_protocol: SHA
+ snmp_mode: AUTHPRIV
+ snmp_priv_passphrase: "Lablab@123"
+ snmp_priv_protocol: AES256
+ snmp_retry: 3
+ snmp_timeout: 5
+ snmp_username: v3Public
+ compute_device: True
+ username: cisco
+ type: "COMPUTE_DEVICE"
+
+- name: Add new Meraki device in Inventory with full credentials.Inputs needed for Meraki Device.
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - http_password: "test"
+ type: "MERAKI_DASHBOARD"
+
+- name: Add new Firepower Management device in Inventory with full credentials.Input needed to add Device.
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ http_username: "testuser"
+ http_password: "test"
+ http_port: "443"
+ type: "FIREPOWER_MANAGEMENT_SYSTEM"
+
+- name: Add new Third Party device in Inventory with full credentials.Input needed to add Device.
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ snmp_auth_passphrase: "Lablab@12"
+ snmp_auth_protocol: SHA
+ snmp_mode: AUTHPRIV
+ snmp_priv_passphrase: "Lablab@123"
+ snmp_priv_protocol: AES256
+ snmp_retry: 3
+ snmp_timeout: 5
+ snmp_username: v3Public
+ type: "THIRD_PARTY_DEVICE"
+
+- name: Update device details or credentials in Inventory
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - cli_transport: telnet
+ compute_device: False
+ password: newtest123
+ enable_password: newtest1233
+ ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ type: NETWORK_DEVICE
+ credential_update: True
+
+- name: Update new management IP address of device in inventory
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1"]
+ credential_update: True
+ update_mgmt_ipaddresslist:
+ - exist_mgmt_ipaddress: "1.1.1.1"
+ new_mgmt_ipaddress: "12.12.12.12"
+
+- name: Associate Wired Devices to site and Provisioned it in Inventory
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - provision_wired_device:
+ - device_ip: "1.1.1.1"
+ site_name: "Global/USA/San Francisco/BGL_18/floor_pnp"
+ resync_retry_count: 200
+ resync_retry_interval: 2
+ - device_ip: "2.2.2.2"
+ site_name: "Global/USA/San Francisco/BGL_18/floor_test"
+ resync_retry_count: 200
+ resync_retry_interval: 2
+
+- name: Update Device Role with IP Address
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ role: ACCESS
+
+- name: Update Interface details with IP Address
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ update_interface_details:
+ description: "Testing for updating interface details"
+ admin_status: "UP"
+ vlan_id: 23
+ voice_vlan_id: 45
+ deployment_mode: "Deploy"
+ interface_name: ["GigabitEthernet1/0/11", "FortyGigabitEthernet1/1/1"]
+ clear_mac_address_table: True
+
+- name: Export Device Details in a CSV file Interface details with IP Address
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ export_device_list:
+ password: "File_password"
+ operation_enum: "0"
+ parameters: ["componentName", "SerialNumber", "Last Sync Status"]
+
+- name: Create Global User Defined with IP Address
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ add_user_defined_field:
+ - name: Test123
+ description: "Added first udf for testing"
+ value: "value123"
+ - name: Test321
+ description: "Added second udf for testing"
+ value: "value321"
+
+- name: Resync Device with IP Addresses
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ device_resync: True
+ force_sync: False
+
+- name: Reboot AP Devices with IP Addresses
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ reboot_device: True
+
+- name: Delete Provision/Unprovision Devices by IP Address
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: False
+ dnac_log_level: "{{dnac_log_level}}"
+ state: deleted
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ clean_config: False
+
+- name: Delete Global User Defined Field with name
+ cisco.dnac.inventory_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: deleted
+ config:
+ - ip_address_list: ["1.1.1.1", "2.2.2.2"]
+ add_user_defined_field:
+ name: "Test123"
+
+"""
+
+RETURN = r"""
+
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
+# common approach when a module relies on optional dependencies that are not available during the validation process.
+try:
+ import pyzipper
+ HAS_PYZIPPER = True
+except ImportError:
+ HAS_PYZIPPER = False
+ pyzipper = None
+
+import csv
+import time
+from datetime import datetime
+from io import BytesIO, StringIO
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts,
+)
+# Defer this feature as API issue is there once it's fixed we will addresses it in upcoming release iac2.0
+support_for_provisioning_wireless = False
+
+
+class Inventory(DnacBase):
+ """Class containing member attributes for inventory workflow manager module"""
+
+ def __init__(self, module):
+ super().__init__(module)
+ self.supported_states = ["merged", "deleted"]
+
+ def validate_input(self):
+ """
+ Validate the fields provided in the playbook.
+ Checks the configuration provided in the playbook against a predefined specification
+ to ensure it adheres to the expected structure and data types.
+ Parameters:
+ self: The instance of the class containing the 'config' attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of the 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call 'validate_input' on it.
+ If the validation succeeds, 'self.status' will be 'success' and 'self.validated_config'
+ will contain the validated configuration. If it fails, 'self.status' will be 'failed', and
+ 'self.msg' will describe the validation issues.
+ """
+
+ temp_spec = {
+ 'cli_transport': {'type': 'str'},
+ 'compute_device': {'type': 'bool'},
+ 'enable_password': {'type': 'str'},
+ 'extended_discovery_info': {'type': 'str'},
+ 'http_password': {'type': 'str'},
+ 'http_port': {'type': 'str'},
+ 'http_secure': {'type': 'bool'},
+ 'http_username': {'type': 'str'},
+ 'ip_address_list': {'type': 'list', 'elements': 'str'},
+ 'hostname_list': {'type': 'list', 'elements': 'str'},
+ 'mac_address_list': {'type': 'list', 'elements': 'str'},
+ 'netconf_port': {'type': 'str'},
+ 'password': {'type': 'str'},
+ 'serial_number': {'type': 'str'},
+ 'snmp_auth_passphrase': {'type': 'str'},
+ 'snmp_auth_protocol': {'default': "SHA", 'type': 'str'},
+ 'snmp_mode': {'type': 'str'},
+ 'snmp_priv_passphrase': {'type': 'str'},
+ 'snmp_priv_protocol': {'type': 'str'},
+ 'snmp_ro_community': {'type': 'str'},
+ 'snmp_rw_community': {'type': 'str'},
+ 'snmp_retry': {'default': 3, 'type': 'int'},
+ 'snmp_timeout': {'default': 5, 'type': 'int'},
+ 'snmp_username': {'type': 'str'},
+ 'snmp_version': {'type': 'str'},
+ 'update_mgmt_ipaddresslist': {'type': 'list', 'elements': 'dict'},
+ 'username': {'type': 'str'},
+ 'role': {'type': 'str'},
+ 'device_resync': {'type': 'bool'},
+ 'reboot_device': {'type': 'bool'},
+ 'credential_update': {'type': 'bool'},
+ 'force_sync': {'type': 'bool'},
+ 'clean_config': {'type': 'bool'},
+ 'add_user_defined_field': {
+ 'type': 'list',
+ 'name': {'type': 'str'},
+ 'description': {'type': 'str'},
+ 'value': {'type': 'str'},
+ },
+ 'update_interface_details': {
+ 'type': 'dict',
+ 'description': {'type': 'str'},
+ 'vlan_id': {'type': 'int'},
+ 'voice_vlan_id': {'type': 'int'},
+ 'interface_name': {'type': 'list', 'elements': 'str'},
+ 'deployment_mode': {'default': 'Deploy', 'type': 'str'},
+ 'clear_mac_address_table': {'default': False, 'type': 'bool'},
+ 'admin_status': {'type': 'str'},
+ },
+ 'export_device_list': {
+ 'type': 'dict',
+ 'password': {'type': 'str'},
+ 'operation_enum': {'type': 'str'},
+ 'parameters': {'type': 'list', 'elements': 'str'},
+ },
+ 'provision_wired_device': {
+ 'type': 'list',
+ 'device_ip': {'type': 'str'},
+ 'site_name': {'type': 'str'},
+ 'resync_retry_count': {'default': 200, 'type': 'int'},
+ 'resync_retry_interval': {'default': 2, 'type': 'int'},
+ }
+ }
+
+ # Validate device params
+ valid_temp, invalid_params = validate_list_of_dicts(
+ self.config, temp_spec
+ )
+
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format(invalid_params)
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_temp
+ self.msg = "Successfully validated playbook configuration parameters using 'validate_input': {0}".format(str(valid_temp))
+ self.log(self.msg, "INFO")
+ self.status = "success"
+
+ return self
+
+ def get_device_ips_from_config_priority(self):
+ """
+ Retrieve device IPs based on the configuration.
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Cisco Catalyst Center.
+ Returns:
+ list: A list containing device IPs.
+ Description:
+ This method retrieves device IPs based on the priority order specified in the configuration.
+ It first checks if device IPs are available. If not, it checks hostnames, serial numbers,
+ and MAC addresses in order and retrieves IPs based on availability.
+ If none of the information is available, an empty list is returned.
+ """
+ # Retrieve device IPs from the configuration
+ device_ips = self.config[0].get("ip_address_list")
+
+ if device_ips:
+ return device_ips
+
+ # If device IPs are not available, check hostnames
+ device_hostnames = self.config[0].get("hostname_list")
+ if device_hostnames:
+ return self.get_device_ips_from_hostname(device_hostnames)
+
+ # If hostnames are not available, check serial numbers
+ device_serial_numbers = self.config[0].get("serial_number_list")
+ if device_serial_numbers:
+ return self.get_device_ips_from_serial_number(device_serial_numbers)
+
+ # If serial numbers are not available, check MAC addresses
+ device_mac_addresses = self.config[0].get("mac_address_list")
+ if device_mac_addresses:
+ return self.get_device_ips_from_mac_address(device_mac_addresses)
+
+ # If no information is available, return an empty list
+ return []
+
+ def device_exists_in_ccc(self):
+ """
+ Check which devices already exists in Cisco Catalyst Center and return both device_exist and device_not_exist in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Cisco Catalyst Center.
+ Returns:
+ list: A list of devices that exist in Cisco Catalyst Center.
+ Description:
+ Queries Cisco Catalyst Center to check which devices are already present in Cisco Catalyst Center and store
+ its management IP address in the list of devices that exist.
+ Example:
+ To use this method, create an instance of the class and call 'device_exists_in_ccc' on it,
+ The method returns a list of management IP addressesfor devices that exist in Cisco Catalyst Center.
+ """
+
+ device_in_ccc = []
+
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ )
+
+ except Exception as e:
+ error_message = "Error while fetching device from Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "CRITICAL")
+ raise Exception(error_message)
+
+ if response:
+ self.log("Received API response from 'get_device_list': {0}".format(str(response)), "DEBUG")
+ response = response.get("response")
+ for ip in response:
+ device_ip = ip["managementIpAddress"]
+ device_in_ccc.append(device_ip)
+
+ return device_in_ccc
+
+ def is_udf_exist(self, field_name):
+ """
+ Check if a Global User Defined Field exists in Cisco Catalyst Center based on its name.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ field_name (str): The name of the Global User Defined Field.
+ Returns:
+ bool: True if the Global User Defined Field exists, False otherwise.
+ Description:
+ The function sends a request to Cisco Catalyst Center to retrieve all Global User Defined Fields
+ with the specified name. If matching field is found, the function returns True, indicating that
+ the field exists else returns False.
+ """
+
+ response = self.dnac._exec(
+ family="devices",
+ function='get_all_user_defined_fields',
+ params={"name": field_name},
+ )
+
+ self.log("Received API response from 'get_all_user_defined_fields': {0}".format(str(response)), "DEBUG")
+ udf = response.get("response")
+
+ if (len(udf) == 1):
+ return True
+
+ message = "Global User Defined Field with name '{0}' doesnot exist in Cisco Catalyst Center".format(field_name)
+ self.log(message, "INFO")
+
+ return False
+
+ def create_user_defined_field(self, udf):
+ """
+ Create a Global User Defined Field in Cisco Catalyst Center based on the provided configuration.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ udf (dict): A dictionary having the payload for the creation of user defined field(UDF) in Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ The function retrieves the configuration for adding a user-defined field from the configuration object,
+ sends the request to Cisco Catalyst Center to create the field, and logs the response.
+ """
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='create_user_defined_field',
+ params=udf,
+ )
+ self.log("Received API response from 'create_user_defined_field': {0}".format(str(response)), "DEBUG")
+ response = response.get("response")
+ field_name = udf.get('name')
+ self.log("Global User Defined Field with name '{0}' created successfully".format(field_name), "INFO")
+ self.status = "success"
+
+ except Exception as e:
+ error_message = "Error while creating Global UDF(User Defined Field) in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "ERROR")
+
+ return self
+
+ def add_field_to_devices(self, device_ids, udf):
+ """
+ Add a Global user-defined field with specified details to a list of devices in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ device_ids (list): A list of device IDs to which the user-defined field will be added.
+ udf (dict): A dictionary having the user defined field details including name and value.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ The function retrieves the details of the user-defined field from the configuration object,
+ including the field name and default value then iterates over list of device IDs, creating a payload for
+ each device and sending the request to Cisco Catalyst Center to add the user-defined field.
+ """
+ field_name = udf.get('name')
+ field_value = udf.get('value', '1')
+ for device_id in device_ids:
+ payload = {}
+ payload['name'] = field_name
+ payload['value'] = field_value
+ udf_param_dict = {
+ 'payload': [payload],
+ 'device_id': device_id
+ }
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='add_user_defined_field_to_device',
+ params=udf_param_dict,
+ )
+ self.log("Received API response from 'add_user_defined_field_to_device': {0}".format(str(response)), "DEBUG")
+ response = response.get("response")
+ self.status = "success"
+ self.result['changed'] = True
+
+ except Exception as e:
+ self.status = "failed"
+ error_message = "Error while adding Global UDF to device in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "ERROR")
+ self.result['changed'] = False
+
+ return self
+
+ def trigger_export_api(self, payload_params):
+ """
+ Triggers the export API to generate a CSV file containing device details based on the given payload parameters.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ payload_params (dict): A dictionary containing parameters required for the export API.
+ Returns:
+ dict: The response from the export API, including information about the task and file ID.
+ If the export is successful, the CSV file can be downloaded using the file ID.
+ Description:
+ The function initiates the export API in Cisco Catalyst Center to generate a CSV file containing detailed information
+ about devices.The response from the API includes task details and a file ID.
+ """
+
+ response = self.dnac._exec(
+ family="devices",
+ function='export_device_list',
+ op_modifies=True,
+ params=payload_params,
+ )
+ self.log("Received API response from 'export_device_list': {0}".format(str(response)), "DEBUG")
+ response = response.get("response")
+ task_id = response.get("taskId")
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+
+ if execution_details.get("additionalStatusURL"):
+ file_id = execution_details.get("additionalStatusURL").split("/")[-1]
+ break
+ elif execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "Could not get the File ID because of {0} so can't export device details in csv file".format(failure_reason)
+ else:
+ self.msg = "Could not get the File ID so can't export device details in csv file"
+ self.log(self.msg, "ERROR")
+
+ return response
+
+ # With this File ID call the Download File by FileID API and process the response
+ response = self.dnac._exec(
+ family="file",
+ function='download_a_file_by_fileid',
+ op_modifies=True,
+ params={"file_id": file_id},
+ )
+ self.log("Received API response from 'download_a_file_by_fileid': {0}".format(str(response)), "DEBUG")
+
+ return response
+
+ def decrypt_and_read_csv(self, response, password):
+ """
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ response (requests.Response): HTTP response object containing the encrypted CSV file.
+ password (str): Password used for decrypting the CSV file.
+ Returns:
+ csv.DictReader: A CSV reader object for the decrypted content, allowing iteration over rows as dictionaries.
+ Description:
+ Decrypts and reads a CSV-like file from the given HTTP response using the provided password.
+ """
+
+ zip_data = BytesIO(response.data)
+
+ if not HAS_PYZIPPER:
+ self.msg = "pyzipper is required for this module. Install pyzipper to use this functionality."
+ self.log(self.msg, "CRITICAL")
+ self.status = "failed"
+ return self
+
+ snmp_protocol = self.config[0].get('snmp_priv_protocol', 'AES128')
+ encryption_dict = {
+ 'AES128': 'pyzipper.WZ_AES128',
+ 'AES192': 'pyzipper.WZ_AES192',
+ 'AES256': 'pyzipper.WZ_AES',
+ 'CISCOAES128': 'pyzipper.WZ_AES128',
+ 'CISCOAES192': 'pyzipper.WZ_AES192',
+ 'CISCOAES256': 'pyzipper.WZ_AES'
+ }
+ try:
+ encryption_method = encryption_dict.get(snmp_protocol)
+ except Exception as e:
+ self.log("Given SNMP protcol '{0}' not present".format(snmp_protocol), "WARNING")
+
+ if not encryption_method:
+ self.msg = "Invalid SNMP protocol '{0}' specified for encryption.".format(snmp_protocol)
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ # Create a PyZipper object with the password
+ with pyzipper.AESZipFile(zip_data, 'r', compression=pyzipper.ZIP_LZMA, encryption=encryption_method) as zip_ref:
+ # Assuming there is a single file in the zip archive
+ file_name = zip_ref.namelist()[0]
+
+ # Extract the content of the file with the provided password
+ file_content_binary = zip_ref.read(file_name, pwd=password.encode('utf-8'))
+
+ # Now 'file_content_binary' contains the binary content of the decrypted file
+ # Since the content is text, so we can decode it
+ file_content_text = file_content_binary.decode('utf-8')
+
+ # Now 'file_content_text' contains the text content of the decrypted file
+ self.log("Text content of decrypted file: {0}".format(file_content_text), "DEBUG")
+
+ # Parse the CSV-like string into a list of dictionaries
+ csv_reader = csv.DictReader(StringIO(file_content_text))
+
+ return csv_reader
+
+ def export_device_details(self):
+ """
+ Export device details from Cisco Catalyst Center into a CSV file.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of the class with updated result, status, and log.
+ Description:
+ This function exports device details from Cisco Catalyst Center based on the provided IP addresses in the configuration.
+ It retrieves the device UUIDs, calls the export device list API, and downloads the exported data of both device details and
+ and device credentials with an encrtypted zip file with password into CSV format.
+ The CSV data is then parsed and written to a file.
+ """
+
+ device_ips = self.get_device_ips_from_config_priority()
+
+ if not device_ips:
+ self.status = "failed"
+ self.msg = "Cannot export device details as no devices are specified in the playbook"
+ self.log(self.msg, "ERROR")
+ return self
+
+ try:
+ device_uuids = self.get_device_ids(device_ips)
+
+ if not device_uuids:
+ self.status = "failed"
+ self.result['changed'] = False
+ self.msg = "Could not find device UUIDs for exporting device details"
+ self.log(self.msg, "ERROR")
+ return self
+
+ # Now all device UUID get collected so call the export device list API
+ export_device_list = self.config[0].get('export_device_list')
+ password = export_device_list.get("password")
+
+ if not self.is_valid_password(password):
+ self.status = "failed"
+ detailed_msg = """Invalid password. Min password length is 8 and it should contain atleast one lower case letter,
+ one uppercase letter, one digit and one special characters from -=\\;,./~!@#$%^&*()_+{}[]|:?"""
+ formatted_msg = ' '.join(line.strip() for line in detailed_msg.splitlines())
+ self.msg = formatted_msg
+ self.log(formatted_msg, "INFO")
+ return self
+
+ payload_params = {
+ "deviceUuids": device_uuids,
+ "password": password,
+ "operationEnum": export_device_list.get("operation_enum", "0"),
+ "parameters": export_device_list.get("parameters")
+ }
+
+ response = self.trigger_export_api(payload_params)
+ self.check_return_status()
+
+ if payload_params["operationEnum"] == "0":
+ temp_file_name = response.filename
+ output_file_name = temp_file_name.split(".")[0] + ".csv"
+ csv_reader = self.decrypt_and_read_csv(response, password)
+ self.check_return_status()
+ else:
+ decoded_resp = response.data.decode(encoding='utf-8')
+ self.log("Decoded response of Export Device Credential file: {0}".format(str(decoded_resp)), "DEBUG")
+
+ # Parse the CSV-like string into a list of dictionaries
+ csv_reader = csv.DictReader(StringIO(decoded_resp))
+ current_date = datetime.now()
+ formatted_date = current_date.strftime("%m-%d-%Y")
+ output_file_name = "devices-" + str(formatted_date) + ".csv"
+
+ device_data = []
+ for row in csv_reader:
+ device_data.append(row)
+
+ # Write the data to a CSV file
+ with open(output_file_name, 'w', newline='') as csv_file:
+ fieldnames = device_data[0].keys()
+ csv_writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
+ csv_writer.writeheader()
+ csv_writer.writerows(device_data)
+
+ self.msg = "Device Details Exported Successfully to the CSV file: {0}".format(output_file_name)
+ self.log(self.msg, "INFO")
+ self.status = "success"
+ self.result['changed'] = True
+ self.result['response'] = self.msg
+
+ except Exception as e:
+ self.msg = "Error while exporting device details into CSV file for device(s): '{0}'".format(str(device_ips))
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+
+ return self
+
+ def get_ap_devices(self, device_ips):
+ """
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ device_ip (str): The management IP address of the device for which the response is to be retrieved.
+ Returns:
+ list: A list containing Access Point device IP's obtained from the Cisco Catalyst Center.
+ Description:
+ This method communicates with Cisco Catalyst Center to retrieve the details of a device with the specified
+ management IP address and check if device family matched to Unified AP. It executes the 'get_device_list'
+ API call with the provided device IP address, logs the response, and returns list containing ap device ips.
+ """
+
+ ap_device_list = []
+ for device_ip in device_ips:
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params={"managementIpAddress": device_ip}
+ )
+ response = response.get('response', [])
+
+ if response and response[0].get('family', '') == "Unified AP":
+ ap_device_list.append(device_ip)
+ except Exception as e:
+ error_message = "Error while getting the response of device from Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "CRITICAL")
+ raise Exception(error_message)
+
+ return ap_device_list
+
+ def resync_devices(self):
+ """
+ Resync devices in Cisco Catalyst Center.
+ This function performs the Resync operation for the devices specified in the playbook.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ The function expects the following parameters in the configuration:
+ - "ip_address_list": List of device IP addresses to be resynced.
+ - "force_sync": (Optional) Whether to force sync the devices. Defaults to "False".
+ """
+
+ # Code for triggers the resync operation using the retrieved device IDs and force sync parameter.
+ device_ips = self.get_device_ips_from_config_priority()
+ input_device_ips = device_ips.copy()
+ device_in_ccc = self.device_exists_in_ccc()
+
+ for device_ip in input_device_ips:
+ if device_ip not in device_in_ccc:
+ input_device_ips.remove(device_ip)
+
+ ap_devices = self.get_ap_devices(input_device_ips)
+ self.log("AP Devices from the playbook input are: {0}".format(str(ap_devices)), "INFO")
+
+ if ap_devices:
+ for ap_ip in ap_devices:
+ input_device_ips.remove(ap_ip)
+ self.log("Following devices {0} are AP, so can't perform resync operation.".format(str(ap_devices)), "WARNING")
+
+ if not input_device_ips:
+ self.msg = "Cannot perform the Resync operation as the device(s) with IP(s) {0} are not present in Cisco Catalyst Center".format(str(device_ips))
+ self.status = "success"
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ self.log(self.msg, "WARNING")
+ return self
+
+ device_ids = self.get_device_ids(input_device_ips)
+ try:
+ force_sync = self.config[0].get("force_sync", False)
+ resync_param_dict = {
+ 'payload': device_ids,
+ 'force_sync': force_sync
+ }
+ response = self.dnac._exec(
+ family="devices",
+ function='sync_devices_using_forcesync',
+ op_modifies=True,
+ params=resync_param_dict,
+ )
+ self.log("Received API response from 'sync_devices_using_forcesync': {0}".format(str(response)), "DEBUG")
+
+ if response and isinstance(response, dict):
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+
+ if 'Synced' in execution_details.get("progress"):
+ self.status = "success"
+ self.result['changed'] = True
+ self.result['response'] = execution_details
+ self.msg = "Devices have been successfully resynced. Devices resynced: {0}".format(str(input_device_ips))
+ self.log(self.msg, "INFO")
+ break
+ elif execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "Device resynced get failed because of {0}".format(failure_reason)
+ else:
+ self.msg = "Device resynced get failed."
+ self.log(self.msg, "ERROR")
+ break
+
+ except Exception as e:
+ self.status = "failed"
+ error_message = "Error while resyncing device in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "ERROR")
+
+ return self
+
+ def reboot_access_points(self):
+ """
+ Reboot access points in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of the class with updated result, status, and log.
+ Description:
+ This function performs a reboot operation on access points in Cisco Catalyst Center based on the provided IP addresses
+ in the configuration. It retrieves the AP devices' MAC addresses, calls the reboot access points API, and monitors
+ the progress of the reboot operation.
+ """
+
+ device_ips = self.get_device_ips_from_config_priority()
+ input_device_ips = device_ips.copy()
+
+ if input_device_ips:
+ ap_devices = self.get_ap_devices(input_device_ips)
+ self.log("AP Devices from the playbook input are: {0}".format(str(ap_devices)), "INFO")
+ for device_ip in input_device_ips:
+ if device_ip not in ap_devices:
+ input_device_ips.remove(device_ip)
+
+ if not input_device_ips:
+ self.msg = "No AP Devices IP given in the playbook so can't perform reboot operation"
+ self.status = "success"
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ self.log(self.msg, "WARNING")
+ return self
+
+ # Get and store the apEthernetMacAddress of given devices
+ ap_mac_address_list = []
+ for device_ip in input_device_ips:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params={"managementIpAddress": device_ip}
+ )
+ response = response.get('response')
+ if not response:
+ continue
+
+ response = response[0]
+ ap_mac_address = response.get('apEthernetMacAddress')
+
+ if ap_mac_address is not None:
+ ap_mac_address_list.append(ap_mac_address)
+
+ if not ap_mac_address_list:
+ self.status = "success"
+ self.result['changed'] = False
+ self.msg = "Cannot find the AP devices for rebooting"
+ self.result['response'] = self.msg
+ self.log(self.msg, "INFO")
+ return self
+
+ # Now call the Reboot Access Point API
+ reboot_params = {
+ "apMacAddresses": ap_mac_address_list
+ }
+ response = self.dnac._exec(
+ family="wireless",
+ function='reboot_access_points',
+ op_modifies=True,
+ params=reboot_params,
+ )
+ self.log(str(response))
+
+ if response and isinstance(response, dict):
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+
+ if 'url' in execution_details.get("progress"):
+ self.status = "success"
+ self.result['changed'] = True
+ self.result['response'] = execution_details
+ self.msg = "AP Device(s) {0} successfully rebooted!".format(str(input_device_ips))
+ self.log(self.msg, "INFO")
+ break
+ elif execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "AP Device Rebooting get failed because of {0}".format(failure_reason)
+ else:
+ self.msg = "AP Device Rebooting get failed"
+ self.log(self.msg, "ERROR")
+ break
+
+ return self
+
+ def handle_successful_provisioning(self, device_ip, execution_details, device_type):
+ """
+ Handle successful provisioning of Wired/Wireless device.
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - device_ip (str): The IP address of the provisioned device.
+ - execution_details (str): Details of the provisioning execution.
+ - device_type (str): The type or category of the provisioned device(Wired/Wireless).
+ Return:
+ None
+ Description:
+ This method updates the status, result, and logs the successful provisioning of a device.
+ """
+
+ self.status = "success"
+ self.result['changed'] = True
+ self.result['response'] = execution_details
+ self.log("{0} Device {1} provisioned successfully!!".format(device_type, device_ip), "INFO")
+
+ def handle_failed_provisioning(self, device_ip, execution_details, device_type):
+ """
+ Handle failed provisioning of Wired/Wireless device.
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - device_ip (str): The IP address of the device that failed provisioning.
+ - execution_details (dict): Details of the failed provisioning execution in key "failureReason" indicating reason for failure.
+ - device_type (str): The type or category of the provisioned device(Wired/Wireless).
+ Return:
+ None
+ Description:
+ This method updates the status, result, and logs the failure of provisioning for a device.
+ """
+
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason", "Unknown failure reason")
+ self.msg = "{0} Device Provisioning failed for {1} because of {2}".format(device_type, device_ip, failure_reason)
+ self.log(self.msg, "WARNING")
+
+ def handle_provisioning_exception(self, device_ip, exception, device_type):
+ """
+ Handle an exception during the provisioning process of Wired/Wireless device..
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - device_ip (str): The IP address of the device involved in provisioning.
+ - exception (Exception): The exception raised during provisioning.
+ - device_type (str): The type or category of the provisioned device(Wired/Wireless).
+ Return:
+ None
+ Description:
+ This method logs an error message indicating an exception occurred during the provisioning process for a device.
+ """
+
+ error_message = "Error while Provisioning the {0} device {1} in Cisco Catalyst Center: {2}".format(device_type, device_ip, str(exception))
+ self.log(error_message, "ERROR")
+
+ def handle_all_already_provisioned(self, device_ips, device_type):
+ """
+ Handle successful provisioning for all devices(Wired/Wireless).
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - device_type (str): The type or category of the provisioned device(Wired/Wireless).
+ Return:
+ None
+ Description:
+ This method updates the status, result, and logs the successful provisioning for all devices(Wired/Wireless).
+ """
+
+ self.status = "success"
+ self.msg = "All the {0} Devices '{1}' given in the playbook are already Provisioned".format(device_type, str(device_ips))
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ self.result['changed'] = False
+
+ def handle_all_provisioned(self, device_type):
+ """
+ Handle successful provisioning for all devices(Wired/Wireless).
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - device_type (str): The type or category of the provisioned devices(Wired/Wireless).
+ Return:
+ None
+ Description:
+ This method updates the status, result, and logs the successful provisioning for all devices(Wired/Wireless).
+ """
+
+ self.status = "success"
+ self.result['changed'] = True
+ self.log("All {0} Devices provisioned successfully!!".format(device_type), "INFO")
+
+ def handle_all_failed_provision(self, device_type):
+ """
+ Handle failure of provisioning for all devices(Wired/Wireless).
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - device_type (str): The type or category of the devices(Wired/Wireless).
+ Return:
+ None
+ Description:
+ This method updates the status and logs a failure message indicating that
+ provisioning failed for all devices of a specific type.
+ """
+
+ self.status = "failed"
+ self.msg = "{0} Device Provisioning failed for all devices".format(device_type)
+ self.log(self.msg, "INFO")
+
+ def handle_partially_provisioned(self, provision_count, device_type):
+ """
+ Handle partial success in provisioning for devices(Wired/Wireless).
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - provision_count (int): The count of devices that were successfully provisioned.
+ - device_type (str): The type or category of the provisioned devices(Wired/Wireless).
+ Return:
+ None
+ Description:
+ This method updates the status, result, and logs a partial success message indicating that provisioning was successful
+ for a certain number of devices(Wired/Wireless).
+ """
+
+ self.status = "success"
+ self.result['changed'] = True
+ self.log("{0} Devices provisioned successfully partially for {1} devices".format(device_type, provision_count), "INFO")
+
    def provisioned_wired_device(self):
        """
        Provision wired devices in Cisco Catalyst Center.
        Parameters:
            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        Returns:
            self (object): An instance of the class with updated result, status, and log.
        Description:
            This function provisions wired devices in Cisco Catalyst Center based on the configuration provided.
            It retrieves the site name and IP addresses of the devices from the list of configuration,
            waits for each device to reach the 'Managed' state, attempts to provision each device
            with its site, and monitors the provisioning task until it succeeds or errors out.
            Devices that never reach the managed state, or whose provisioning API call fails,
            are skipped so the remaining devices can still be processed.
        """

        provision_wired_list = self.config[0]['provision_wired_device']
        total_devices_to_provisioned = len(provision_wired_list)
        device_ip_list = []
        provision_count, already_provision_count = 0, 0

        for prov_dict in provision_wired_list:
            managed_flag = False
            device_ip = prov_dict['device_ip']
            device_ip_list.append(device_ip)
            site_name = prov_dict['site_name']
            device_type = "Wired"
            # How many times to poll for the device's managed state before giving up.
            resync_retry_count = prov_dict.get("resync_retry_count", 200)
            # This resync retry interval will be in seconds which will check device status at given interval
            resync_retry_interval = prov_dict.get("resync_retry_interval", 2)

            if not site_name or not device_ip:
                self.status = "failed"
                self.msg = "Site and Device IP are required for Provisioning of Wired Devices."
                self.log(self.msg, "ERROR")
                self.result['response'] = self.msg
                return self

            provision_wired_params = {
                'deviceManagementIpAddress': device_ip,
                'siteNameHierarchy': site_name
            }

            # Check till device comes into managed state
            while resync_retry_count:
                response = self.get_device_response(device_ip)
                self.log("Device is in {0} state waiting for Managed State.".format(response['managementState']), "DEBUG")

                if (
                    response.get('managementState') == "Managed"
                    and response.get('collectionStatus') == "Managed"
                    and response.get("hostname")
                ):
                    msg = """Device '{0}' comes to managed state and ready for provisioning with the resync_retry_count
                        '{1}' left having resync interval of {2} seconds""".format(device_ip, resync_retry_count, resync_retry_interval)
                    self.log(msg, "INFO")
                    managed_flag = True
                    break
                # A device stuck in either of these collection states will not become manageable.
                if response.get('collectionStatus') == "Partial Collection Failure" or response.get('collectionStatus') == "Could Not Synchronize":
                    device_status = response.get('collectionStatus')
                    msg = """Device '{0}' comes to '{1}' state and never goes for provisioning with the resync_retry_count
                        '{2}' left having resync interval of {3} seconds""".format(device_ip, device_status, resync_retry_count, resync_retry_interval)
                    self.log(msg, "INFO")
                    managed_flag = False
                    break

                time.sleep(resync_retry_interval)
                resync_retry_count = resync_retry_count - 1

            if not managed_flag:
                # Skip this device but keep processing the rest of the list.
                self.log("""Device {0} is not transitioning to the managed state, so provisioning operation cannot
                        be performed.""".format(device_ip), "WARNING")
                continue

            try:
                response = self.dnac._exec(
                    family="sda",
                    function='provision_wired_device',
                    op_modifies=True,
                    params=provision_wired_params,
                )

                if response.get("status") == "failed":
                    description = response.get("description")
                    error_msg = "Cannot do Provisioning for device {0} beacuse of {1}".format(device_ip, description)
                    self.log(error_msg)
                    continue

                task_id = response.get("taskId")

                # Poll the provisioning task until it completes or errors out.
                while True:
                    execution_details = self.get_task_details(task_id)
                    progress = execution_details.get("progress")

                    if 'TASK_PROVISION' in progress:
                        self.handle_successful_provisioning(device_ip, execution_details, device_type)
                        provision_count += 1
                        break
                    elif execution_details.get("isError"):
                        self.handle_failed_provisioning(device_ip, execution_details, device_type)
                        break

            except Exception as e:
                # Not returning from here as there might be possiblity that for some devices it comes into exception
                # but for others it gets provision successfully or If some devices are already provsioned
                self.handle_provisioning_exception(device_ip, e, device_type)
                if "already provisioned" in str(e):
                    self.log(str(e), "INFO")
                    already_provision_count += 1

        # Check If all the devices are already provsioned, return from here only
        if already_provision_count == total_devices_to_provisioned:
            self.handle_all_already_provisioned(device_ip_list, device_type)
        elif provision_count == total_devices_to_provisioned:
            self.handle_all_provisioned(device_type)
        elif provision_count == 0:
            self.handle_all_failed_provision(device_type)
        else:
            self.handle_partially_provisioned(provision_count, device_type)

        return self
+
+ def get_wireless_param(self, prov_dict):
+ """
+ Get wireless provisioning parameters for a device.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ prov_dict (dict): A dictionary containing configuration parameters for wireless provisioning.
+ Returns:
+ wireless_param (list of dict): A list containing a dictionary with wireless provisioning parameters.
+ Description:
+ This function constructs a list containing a dictionary with wireless provisioning parameters based on the
+ configuration provided in the playbook. It validates the managed AP locations, ensuring they are of type "floor."
+ The function then queries Cisco Catalyst Center to get network device details using the provided device IP.
+ If the device is not found, the function returns the class instance with appropriate status and log messages and
+ returns the wireless provisioning parameters containing site information, managed AP
+ locations, dynamic interfaces, and device name.
+ """
+
+ try:
+ device_ip_address = prov_dict['device_ip']
+ site_name = prov_dict['site_name']
+
+ wireless_param = [
+ {
+ 'site': site_name,
+ 'managedAPLocations': prov_dict['managed_ap_locations'],
+ }
+ ]
+
+ for ap_loc in wireless_param[0]["managedAPLocations"]:
+ if self.get_site_type(site_name=ap_loc) != "floor":
+ self.status = "failed"
+ self.msg = "Managed AP Location must be a floor"
+ self.log(self.msg, "ERROR")
+ return self
+ wireless_param[0]["dynamicInterfaces"] = []
+
+ for interface in prov_dict.get("dynamic_interfaces"):
+ interface_dict = {
+ "interfaceIPAddress": interface.get("interface_ip_address"),
+ "interfaceNetmaskInCIDR": interface.get("interface_netmask_in_cidr"),
+ "interfaceGateway": interface.get("interface_gateway"),
+ "lagOrPortNumber": interface.get("lag_or_port_number"),
+ "vlanId": interface.get("vlan_id"),
+ "interfaceName": interface.get("interface_name")
+ }
+ wireless_param[0]["dynamicInterfaces"].append(interface_dict)
+
+ response = self.dnac_apply['exec'](
+ family="devices",
+ function='get_network_device_by_ip',
+ params={"ip_address": device_ip_address}
+ )
+
+ response = response.get("response")
+ wireless_param[0]["deviceName"] = response.get("hostname")
+ self.wireless_param = wireless_param
+ self.status = "success"
+ self.log("Successfully collected all the parameters required for Wireless Provisioning", "DEBUG")
+
+ except Exception as e:
+ self.msg = """An exception occured while fetching the details for wireless provisioning of
+ device '{0}' due to - {1}""".format(device_ip_address, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def get_site_type(self, site_name):
+ """
+ Get the type of a site in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ site_name (str): The name of the site for which to retrieve the type.
+ Returns:
+ site_type (str or None): The type of the specified site, or None if the site is not found.
+ Description:
+ This function queries Cisco Catalyst Center to retrieve the type of a specified site. It uses the
+ get_site API with the provided site name, extracts the site type from the response, and returns it.
+ If the specified site is not found, the function returns None, and an appropriate log message is generated.
+ """
+
+ try:
+ site_type = None
+ response = self.dnac_apply['exec'](
+ family="sites",
+ function='get_site',
+ params={"name": site_name},
+ )
+
+ if not response:
+ self.msg = "Site '{0}' not found".format(site_name)
+ self.log(self.msg, "INFO")
+ return site_type
+
+ self.log("Received API response from 'get_site': {0}".format(str(response)), "DEBUG")
+ site = response.get("response")
+ site_additional_info = site[0].get("additionalInfo")
+
+ for item in site_additional_info:
+ if item["nameSpace"] == "Location":
+ site_type = item.get("attributes").get("type")
+
+ except Exception as e:
+ self.msg = "Error while fetching the site '{0}' and the specified site was not found in Cisco Catalyst Center.".format(site_name)
+ self.log(self.msg, "ERROR")
+ self.module.fail_json(msg=self.msg, response=[self.msg])
+
+ return site_type
+
    def provisioned_wireless_devices(self):
        """
        Provision Wireless devices in Cisco Catalyst Center.
        Parameters:
            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        Returns:
            self (object): An instance of the class with updated result, status, and log.
        Description:
            This function performs wireless provisioning for the provided list of device IP addresses.
            It iterates through each device, retrieves provisioning parameters using the get_wireless_param function,
            waits for each device to reach the 'Managed' state, and then calls the Cisco Catalyst Center API for
            wireless provisioning, monitoring the task until completion. If all devices are already provisioned,
            it returns success with a relevant message.
        """

        provision_count, already_provision_count = 0, 0
        device_type = "Wireless"
        device_ip_list = []
        provision_wireless_list = self.config[0]['provision_wireless_device']

        for prov_dict in provision_wireless_list:
            try:
                # Collect the device parameters from the playbook to perform wireless provisioing
                self.get_wireless_param(prov_dict).check_return_status()
                device_ip = prov_dict['device_ip']
                device_ip_list.append(device_ip)
                provisioning_params = self.wireless_param
                # How many times to poll for the device's managed state before giving up.
                resync_retry_count = prov_dict.get("resync_retry_count", 200)
                # This resync retry interval will be in seconds which will check device status at given interval
                resync_retry_interval = prov_dict.get("resync_retry_interval", 2)
                managed_flag = True

                # Check till device comes into managed state
                while resync_retry_count:
                    response = self.get_device_response(device_ip)
                    self.log("Device is in {0} state waiting for Managed State.".format(response['managementState']), "DEBUG")

                    if (
                        response.get('managementState') == "Managed"
                        and response.get('collectionStatus') == "Managed"
                        and response.get("hostname")
                    ):
                        msg = """Device '{0}' comes to managed state and ready for provisioning with the resync_retry_count
                            '{1}' left having resync interval of {2} seconds""".format(device_ip, resync_retry_count, resync_retry_interval)
                        self.log(msg, "INFO")
                        managed_flag = True
                        break

                    # A device stuck in either of these collection states will not become manageable.
                    if response.get('collectionStatus') == "Partial Collection Failure" or response.get('collectionStatus') == "Could Not Synchronize":
                        device_status = response.get('collectionStatus')
                        msg = """Device '{0}' comes to '{1}' state and never goes for provisioning with the resync_retry_count
                            '{2}' left having resync interval of {3} seconds""".format(device_ip, device_status, resync_retry_count, resync_retry_interval)
                        self.log(msg, "INFO")
                        managed_flag = False
                        break

                    time.sleep(resync_retry_interval)
                    resync_retry_count = resync_retry_count - 1

                if not managed_flag:
                    # Skip this device but keep processing the rest of the list.
                    self.log("""Device {0} is not transitioning to the managed state, so provisioning operation cannot
                            be performed.""".format(device_ip), "WARNING")
                    continue

                # Now we have provisioning_param so we can do wireless provisioning
                response = self.dnac_apply['exec'](
                    family="wireless",
                    function="provision",
                    op_modifies=True,
                    params=provisioning_params,
                )

                if response.get("status") == "failed":
                    description = response.get("description")
                    error_msg = "Cannot do Provisioning for Wireless device {0} beacuse of {1}".format(device_ip, description)
                    self.log(error_msg, "ERROR")
                    continue

                task_id = response.get("taskId")

                # Poll the provisioning task until it completes or errors out.
                while True:
                    execution_details = self.get_task_details(task_id)
                    progress = execution_details.get("progress")

                    if 'TASK_PROVISION' in progress:
                        self.handle_successful_provisioning(device_ip, execution_details, device_type)
                        provision_count += 1
                        break
                    elif execution_details.get("isError"):
                        self.handle_failed_provisioning(device_ip, execution_details, device_type)
                        break

            except Exception as e:
                # Not returning from here as there might be possiblity that for some devices it comes into exception
                # but for others it gets provision successfully or If some devices are already provsioned
                self.handle_provisioning_exception(device_ip, e, device_type)
                if "already provisioned" in str(e):
                    self.msg = "Device '{0}' already provisioned".format(device_ip)
                    self.log(self.msg, "INFO")
                    already_provision_count += 1

        # Check If all the devices are already provsioned, return from here only
        if already_provision_count == len(device_ip_list):
            self.handle_all_already_provisioned(device_ip_list, device_type)
        elif provision_count == len(device_ip_list):
            self.handle_all_provisioned(device_type)
        elif provision_count == 0:
            self.handle_all_failed_provision(device_type)
        else:
            self.handle_partially_provisioned(provision_count, device_type)

        return self
+
+ def get_udf_id(self, field_name):
+ """
+ Get the ID of a Global User Defined Field in Cisco Catalyst Center based on its name.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Cisco Catalyst Center.
+ field_name (str): The name of the Global User Defined Field.
+ Returns:
+ str: The ID of the Global User Defined Field.
+ Description:
+ The function sends a request to Cisco Catalyst Center to retrieve all Global User Defined Fields
+ with the specified name and extracts the ID of the first matching field.If successful, it returns
+ the ID else returns None.
+ """
+
+ try:
+ udf_id = None
+ response = self.dnac._exec(
+ family="devices",
+ function='get_all_user_defined_fields',
+ params={"name": field_name},
+ )
+ self.log("Received API response from 'get_all_user_defined_fields': {0}".format(str(response)), "DEBUG")
+ udf = response.get("response")
+ if udf:
+ udf_id = udf[0].get("id")
+
+ except Exception as e:
+ error_message = "Exception occurred while getting Global User Defined Fields(UDF) ID from Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "ERROR")
+
+ return udf_id
+
+ def mandatory_parameter(self):
+ """
+ Check for and validate mandatory parameters for adding network devices in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Cisco Catalyst Center.
+ Returns:
+ dict: The input `config` dictionary if all mandatory parameters are present.
+ Description:
+ It will check the mandatory parameters for adding the devices in Cisco Catalyst Center.
+ """
+
+ device_type = self.config[0].get("type", "NETWORK_DEVICE")
+ params_dict = {
+ "NETWORK_DEVICE": ["ip_address_list", "password", "username"],
+ "COMPUTE_DEVICE": ["ip_address_list", "http_username", "http_password", "http_port"],
+ "MERAKI_DASHBOARD": ["http_password"],
+ "FIREPOWER_MANAGEMENT_SYSTEM": ["ip_address_list", "http_username", "http_password"],
+ "THIRD_PARTY_DEVICE": ["ip_address_list"]
+ }
+
+ params_list = params_dict.get(device_type, [])
+
+ mandatory_params_absent = []
+ for param in params_list:
+ if param not in self.config[0]:
+ mandatory_params_absent.append(param)
+
+ if mandatory_params_absent:
+ self.status = "failed"
+ self.msg = "Required parameters {0} for adding devices are not present".format(str(mandatory_params_absent))
+ self.result['msg'] = self.msg
+ self.log(self.msg, "ERROR")
+ else:
+ self.status = "success"
+ self.msg = "Required parameter for Adding the devices in Inventory are present."
+ self.log(self.msg, "INFO")
+
+ return self
+
+ def get_have(self, config):
+ """
+ Retrieve and check device information with Cisco Catalyst Center to determine if devices already exist.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Cisco Catalyst Center.
+ config (dict): A dictionary containing the configuration details of devices to be checked.
+ Returns:
+ dict: A dictionary containing information about the devices in the playbook, devices that exist in
+ Cisco Catalyst Center, and devices that are not present in Cisco Catalyst Center.
+ Description:
+ This function checks the specified devices in the playbook against the devices existing in Cisco Catalyst Center with following keys:
+ - "want_device": A list of devices specified in the playbook.
+ - "device_in_ccc": A list of devices that already exist in Cisco Catalyst Center.
+ - "device_not_in_ccc": A list of devices that are not present in Cisco Catalyst Center.
+ """
+
+ have = {}
+ want_device = self.get_device_ips_from_config_priority()
+
+ # Get the list of device that are present in Cisco Catalyst Center
+ device_in_ccc = self.device_exists_in_ccc()
+ device_not_in_ccc, devices_in_playbook = [], []
+
+ for ip in want_device:
+ devices_in_playbook.append(ip)
+ if ip not in device_in_ccc:
+ device_not_in_ccc.append(ip)
+
+ if self.config[0].get('provision_wired_device'):
+ provision_wired_list = self.config[0].get('provision_wired_device')
+
+ for prov_dict in provision_wired_list:
+ device_ip_address = prov_dict['device_ip']
+ if device_ip_address not in want_device:
+ devices_in_playbook.append(device_ip_address)
+ if device_ip_address not in device_in_ccc:
+ device_not_in_ccc.append(device_ip_address)
+
+ if support_for_provisioning_wireless:
+ if self.config[0].get('provision_wireless_device'):
+ provision_wireless_list = self.config[0].get('provision_wireless_device')
+
+ for prov_dict in provision_wireless_list:
+ device_ip_address = prov_dict['device_ip']
+ if device_ip_address not in want_device and device_ip_address not in devices_in_playbook:
+ devices_in_playbook.append(device_ip_address)
+ if device_ip_address not in device_in_ccc and device_ip_address not in device_not_in_ccc:
+ device_not_in_ccc.append(device_ip_address)
+
+ self.log("Device(s) {0} exists in Cisco Catalyst Center".format(str(device_in_ccc)), "INFO")
+ have["want_device"] = want_device
+ have["device_in_ccc"] = device_in_ccc
+ have["device_not_in_ccc"] = device_not_in_ccc
+ have["devices_in_playbook"] = devices_in_playbook
+
+ self.have = have
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+
+ return self
+
+ def get_device_params(self, params):
+ """
+ Extract and store device parameters from the playbook for device processing in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ params (dict): A dictionary containing device parameters retrieved from the playbook.
+ Returns:
+ dict: A dictionary containing the extracted device parameters.
+ Description:
+ This function will extract and store parameters in dictionary for adding, updating, editing, or deleting devices Cisco Catalyst Center.
+ """
+
+ device_param = {
+ "cliTransport": params.get("cli_transport"),
+ "enablePassword": params.get("enable_password"),
+ "password": params.get("password"),
+ "ipAddress": params.get("ip_address_list"),
+ "snmpAuthPassphrase": params.get("snmp_auth_passphrase"),
+ "snmpAuthProtocol": params.get("snmp_auth_protocol"),
+ "snmpMode": params.get("snmp_mode"),
+ "snmpPrivPassphrase": params.get("snmp_priv_passphrase"),
+ "snmpPrivProtocol": params.get("snmp_priv_protocol"),
+ "snmpROCommunity": params.get("snmp_ro_community"),
+ "snmpRWCommunity": params.get("snmp_rw_community"),
+ "snmpRetry": params.get("snmp_retry"),
+ "snmpTimeout": params.get("snmp_timeout"),
+ "snmpUserName": params.get("snmp_username"),
+ "userName": params.get("username"),
+ "computeDevice": params.get("compute_device"),
+ "extendedDiscoveryInfo": params.get("extended_discovery_info"),
+ "httpPassword": params.get("http_password"),
+ "httpPort": params.get("http_port"),
+ "httpSecure": params.get("http_secure"),
+ "httpUserName": params.get("http_username"),
+ "netconfPort": params.get("netconf_port"),
+ "serialNumber": params.get("serial_number"),
+ "snmpVersion": params.get("snmp_version"),
+ "type": params.get("type"),
+ "updateMgmtIPaddressList": params.get("update_mgmt_ipaddresslist"),
+ "forceSync": params.get("force_sync"),
+ "cleanConfig": params.get("clean_config")
+ }
+
+ if device_param.get("updateMgmtIPaddressList"):
+ device_mngmt_dict = device_param.get("updateMgmtIPaddressList")[0]
+ device_param["updateMgmtIPaddressList"][0] = {}
+
+ device_param["updateMgmtIPaddressList"][0].update(
+ {
+ "existMgmtIpAddress": device_mngmt_dict.get("exist_mgmt_ipaddress"),
+ "newMgmtIpAddress": device_mngmt_dict.get("new_mgmt_ipaddress")
+ })
+
+ return device_param
+
+ def get_device_ids(self, device_ips):
+ """
+ Get the list of unique device IDs for list of specified management IP addresses of devices in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ device_ips (list): The management IP addresses of devices for which you want to retrieve the device IDs.
+ Returns:
+ list: The list of unique device IDs for the specified devices.
+ Description:
+ Queries Cisco Catalyst Center to retrieve the unique device ID associated with a device having the specified
+ IP address. If the device is not found in Cisco Catalyst Center, then print the log message with error severity.
+ """
+
+ device_ids = []
+
+ for device_ip in device_ips:
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params={"managementIpAddress": device_ip}
+ )
+
+ if response:
+ self.log("Received API response from 'get_device_list': {0}".format(str(response)), "DEBUG")
+ response = response.get("response")
+ if not response:
+ continue
+ device_id = response[0]["id"]
+ device_ids.append(device_id)
+
+ except Exception as e:
+ error_message = "Error while fetching device '{0}' from Cisco Catalyst Center: {1}".format(device_ip, str(e))
+ self.log(error_message, "ERROR")
+
+ return device_ids
+
+ def get_device_ips_from_hostname(self, hostname_list):
+ """
+ Get the list of unique device IPs for list of specified hostnames of devices in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ hostname_list (list): The hostnames of devices for which you want to retrieve the device IPs.
+ Returns:
+ list: The list of unique device IPs for the specified devices hostname list.
+ Description:
+ Queries Cisco Catalyst Center to retrieve the unique device IP's associated with a device having the specified
+ list of hostnames. If a device is not found in Cisco Catalyst Center, an error log message is printed.
+ """
+
+ device_ips = []
+ for hostname in hostname_list:
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params={"hostname": hostname}
+ )
+ if response:
+ self.log("Received API response from 'get_device_list': {0}".format(str(response)), "DEBUG")
+ response = response.get("response")
+ if response:
+ device_ip = response[0]["managementIpAddress"]
+ if device_ip:
+ device_ips.append(device_ip)
+ except Exception as e:
+ error_message = "Exception occurred while fetching device from Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "ERROR")
+
+ return device_ips
+
+ def get_device_ips_from_serial_number(self, serial_number_list):
+ """
+ Get the list of unique device IPs for a specified list of serial numbers in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ serial_number_list (list): The list of serial number of devices for which you want to retrieve the device IPs.
+ Returns:
+ list: The list of unique device IPs for the specified devices with serial numbers.
+ Description:
+ Queries Cisco Catalyst Center to retrieve the unique device IPs associated with a device having the specified
+ serial numbers.If a device is not found in Cisco Catalyst Center, an error log message is printed.
+ """
+
+ device_ips = []
+ for serial_number in serial_number_list:
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params={"serialNumber": serial_number}
+ )
+ if response:
+ self.log("Received API response from 'get_device_list': {0}".format(str(response)), "DEBUG")
+ response = response.get("response")
+ if response:
+ device_ip = response[0]["managementIpAddress"]
+ if device_ip:
+ device_ips.append(device_ip)
+ except Exception as e:
+ error_message = "Exception occurred while fetching device from Cisco Catalyst Center - {0}".format(str(e))
+ self.log(error_message, "ERROR")
+
+ return device_ips
+
+ def get_device_ips_from_mac_address(self, mac_address_list):
+ """
+ Get the list of unique device IPs for list of specified mac address of devices in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ mac_address_list (list): The list of mac address of devices for which you want to retrieve the device IPs.
+ Returns:
+ list: The list of unique device IPs for the specified devices.
+ Description:
+ Queries Cisco Catalyst Center to retrieve the unique device IPs associated with a device having the specified
+ mac addresses. If a device is not found in Cisco Catalyst Center, an error log message is printed.
+ """
+
+ device_ips = []
+ for mac_address in mac_address_list:
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params={"macAddress": mac_address}
+ )
+ if response:
+ self.log("Received API response from 'get_device_list': {0}".format(str(response)), "DEBUG")
+ response = response.get("response")
+ if response:
+ device_ip = response[0]["managementIpAddress"]
+ if device_ip:
+ device_ips.append(device_ip)
+ except Exception as e:
+ error_message = "Exception occurred while fetching device from Cisco Catalyst Center - {0}".format(str(e))
+ self.log(error_message, "ERROR")
+
+ return device_ips
+
+    def get_interface_from_id_and_name(self, device_id, interface_name):
+        """
+        Retrieve the interface ID for a device in Cisco Catalyst Center based on device id and interface name.
+        Parameters:
+            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+            device_id (str): The id of the device.
+            interface_name (str): Name of the interface for which details need to be collected.
+        Returns:
+            str: The interface ID on success.
+            None: Implicitly, when the API call succeeds but returns an empty response.
+            self: On exception, with self.status set to 'failed' and self.msg populated.
+        Description:
+            The function sends a request to Cisco Catalyst Center to retrieve the interface information
+            for the device with the provided device id and interface name and extracts the interface ID from the
+            response, and returns the interface ID.
+            NOTE(review): the three distinct return shapes above are part of the existing
+            contract -- callers are expected to invoke check_return_status() afterwards;
+            confirm before unifying the return type.
+        """
+
+        try:
+            interface_detail_params = {
+                'device_id': device_id,
+                'name': interface_name
+            }
+            response = self.dnac._exec(
+                family="devices",
+                function='get_interface_details',
+                params=interface_detail_params
+            )
+            self.log("Received API response from 'get_interface_details': {0}".format(str(response)), "DEBUG")
+            # Unwrap the envelope; an empty inner response falls through and returns None.
+            response = response.get("response")
+
+            if response:
+                self.status = "success"
+                interface_id = response["id"]
+                self.log("""Successfully fetched interface ID ({0}) by using device id {1} and interface name {2}."""
+                         .format(interface_id, device_id, interface_name), "INFO")
+                return interface_id
+
+        except Exception as e:
+            # Failure path: record the error on the instance and hand back self so the
+            # caller's check_return_status() can surface it.
+            error_message = "Error while fetching interface id for interface({0}) from Cisco Catalyst Center: {1}".format(interface_name, str(e))
+            self.log(error_message, "ERROR")
+            self.msg = error_message
+            self.status = "failed"
+            return self
+
+ def get_interface_from_ip(self, device_ip):
+ """
+ Get the interface ID for a device in Cisco Catalyst Center based on its IP address.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ device_ip (str): The IP address of the device.
+ Returns:
+ str: The interface ID for the specified device.
+ Description:
+ The function sends a request to Cisco Catalyst Center to retrieve the interface information
+ for the device with the provided IP address and extracts the interface ID from the
+ response, and returns the interface ID.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_interface_by_ip',
+ params={"ip_address": device_ip}
+ )
+ self.log("Received API response from 'get_interface_by_ip': {0}".format(str(response)), "DEBUG")
+ response = response.get("response")
+
+ if response:
+ interface_id = response[0]["id"]
+ self.log("Fetch Interface Id for device '{0}' successfully !!".format(device_ip))
+ return interface_id
+
+ except Exception as e:
+ error_message = "Error while fetching Interface Id for device '{0}' from Cisco Catalyst Center: {1}".format(device_ip, str(e))
+ self.log(error_message, "ERROR")
+ raise Exception(error_message)
+
+ def get_device_response(self, device_ip):
+ """
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ device_ip (str): The management IP address of the device for which the response is to be retrieved.
+ Returns:
+ dict: A dictionary containing details of the device obtained from the Cisco Catalyst Center.
+ Description:
+ This method communicates with Cisco Catalyst Center to retrieve the details of a device with the specified
+ management IP address. It executes the 'get_device_list' API call with the provided device IP address,
+ logs the response, and returns a dictionary containing information about the device.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params={"managementIpAddress": device_ip}
+ )
+ response = response.get('response')[0]
+
+ except Exception as e:
+ error_message = "Error while getting the response of device from Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "ERROR")
+ raise Exception(error_message)
+
+ return response
+
+ def check_device_role(self, device_ip):
+ """
+ Checks if the device role and role source for a device in Cisco Catalyst Center match the specified values in the configuration.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ device_ip (str): The management IP address of the device for which the device role is to be checked.
+ Returns:
+ bool: True if the device role and role source match the specified values, False otherwise.
+ Description:
+ This method retrieves the device role and role source for a device in Cisco Catalyst Center using the
+ 'get_device_response' method and compares the retrieved values with specified values in the configuration
+ for updating device roles.
+ """
+
+ role = self.config[0].get('role')
+ response = self.get_device_response(device_ip)
+
+ return response.get('role') == role
+
+ def check_interface_details(self, device_ip, interface_name):
+ """
+ Checks if the interface details for a device in Cisco Catalyst Center match the specified values in the configuration.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ device_ip (str): The management IP address of the device for which interface details are to be checked.
+ Returns:
+ bool: True if the interface details match the specified values, False otherwise.
+ Description:
+ This method retrieves the interface details for a device in Cisco Catalyst Center using the 'get_interface_by_ip' API call.
+ It then compares the retrieved details with the specified values in the configuration for updating interface details.
+ If all specified parameters match the retrieved values or are not provided in the playbook parameters, the function
+ returns True, indicating successful validation.
+ """
+ device_id = self.get_device_ids([device_ip])
+
+ if not device_id:
+ self.log("""Error: Device with IP '{0}' not found in Cisco Catalyst Center.Unable to update interface details."""
+ .format(device_ip), "ERROR")
+ return False
+
+ interface_detail_params = {
+ 'device_id': device_id[0],
+ 'name': interface_name
+ }
+ response = self.dnac._exec(
+ family="devices",
+ function='get_interface_details',
+ params=interface_detail_params
+ )
+ self.log("Received API response from 'get_interface_details': {0}".format(str(response)), "DEBUG")
+ response = response.get("response")
+
+ if not response:
+ self.log("No response received from the API 'get_interface_details'.", "DEBUG")
+ return False
+
+ response_params = {
+ 'description': response.get('description'),
+ 'adminStatus': response.get('adminStatus'),
+ 'voiceVlanId': response.get('voiceVlan'),
+ 'vlanId': int(response.get('vlanId'))
+ }
+
+ interface_playbook_params = self.config[0].get('update_interface_details')
+ playbook_params = {
+ 'description': interface_playbook_params.get('description', ''),
+ 'adminStatus': interface_playbook_params.get('admin_status'),
+ 'voiceVlanId': interface_playbook_params.get('voice_vlan_id', ''),
+ 'vlanId': interface_playbook_params.get('vlan_id')
+ }
+
+ for key, value in playbook_params.items():
+ if not value:
+ continue
+ elif response_params[key] != value:
+ return False
+
+ return True
+
+    def check_credential_update(self):
+        """
+        Checks if the credentials for devices in the configuration match the updated values in Cisco Catalyst Center.
+        Parameters:
+            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+        Returns:
+            bool: True if the credentials match the updated values, False otherwise.
+        Description:
+            This method triggers the export API in Cisco Catalyst Center to obtain the updated credential details for
+            the specified devices. It then decrypts and reads the CSV file containing the updated credentials,
+            comparing them with the credentials specified in the configuration.
+        """
+
+        device_ips = self.get_device_ips_from_config_priority()
+        device_uuids = self.get_device_ids(device_ips)
+        # NOTE(review): hard-coded password used to encrypt/decrypt the transient export file
+        # within this check only -- confirm it is never persisted or exposed elsewhere.
+        password = "Testing@123"
+        # operationEnum "0" selects the credential export variant of the export API.
+        payload_params = {"deviceUuids": device_uuids, "password": password, "operationEnum": "0"}
+        response = self.trigger_export_api(payload_params)
+        self.check_return_status()
+        csv_reader = self.decrypt_and_read_csv(response, password)
+        self.check_return_status()
+        # Only the first CSV row is inspected -- presumably one row per device; TODO confirm
+        # behaviour when multiple devices are exported.
+        device_data = next(csv_reader, None)
+
+        if not device_data:
+            return False
+
+        # Map exported CSV column names onto the playbook parameter names.
+        csv_data_dict = {
+            'snmp_retry': device_data['snmp_retries'],
+            'username': device_data['cli_username'],
+            'password': device_data['cli_password'],
+            'enable_password': device_data['cli_enable_password'],
+            'snmp_username': device_data['snmpv3_user_name'],
+            'snmp_auth_protocol': device_data['snmpv3_auth_type'],
+        }
+
+        config = self.config[0]
+        # Compare only keys the playbook actually sets; snmp_retry is compared numerically
+        # because the CSV stores it as a string.
+        for key in csv_data_dict:
+            if key in config and csv_data_dict[key] is not None:
+                if key == "snmp_retry" and int(csv_data_dict[key]) != int(config[key]):
+                    return False
+                elif csv_data_dict[key] != config[key]:
+                    return False
+
+        return True
+
+ def get_provision_wired_device(self, device_ip):
+ """
+ Retrieves the provisioning status of a wired device with the specified management IP address in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ device_ip (str): The management IP address of the wired device for which provisioning status is to be retrieved.
+ Returns:
+ bool: True if the device is provisioned successfully, False otherwise.
+ Description:
+ This method communicates with Cisco Catalyst Center to check the provisioning status of a wired device.
+ It executes the 'get_provisioned_wired_device' API call with the provided device IP address and
+ logs the response.
+ """
+
+ response = self.dnac._exec(
+ family="sda",
+ function='get_provisioned_wired_device',
+ op_modifies=True,
+ params={"device_management_ip_address": device_ip}
+ )
+
+ if response.get("status") == "failed":
+ self.log("Cannot do provisioning for wired device {0} because of {1}.".format(device_ip, response.get('description')), "ERROR")
+ return False
+
+ return True
+
+ def clear_mac_address(self, interface_id, deploy_mode, interface_name):
+ """
+ Clear the MAC address table on a specific interface of a device.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ interface_id (str): The UUID of the interface where the MAC addresses will be cleared.
+ deploy_mode (str): The deployment mode of the device.
+ interface_name(str): The name of the interface for which the MAC addresses will be cleared.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This function clears the MAC address table on a specific interface of a device.
+ The 'deploy_mode' parameter specifies the deployment mode of the device.
+ If the operation is successful, the function returns the response from the API call.
+ If an error occurs during the operation, the function logs the error details and updates the status accordingly.
+ """
+
+ try:
+ payload = {
+ "operation": "ClearMacAddress",
+ "payload": {}
+ }
+ clear_mac_address_payload = {
+ 'payload': payload,
+ 'interface_uuid': interface_id,
+ 'deployment_mode': deploy_mode
+ }
+ response = self.dnac._exec(
+ family="devices",
+ function='clear_mac_address_table',
+ op_modifies=True,
+ params=clear_mac_address_payload,
+ )
+ self.log("Received API response from 'clear_mac_address_table': {0}".format(str(response)), "DEBUG")
+
+ if not (response and isinstance(response, dict)):
+ self.status = "failed"
+ self.msg = """Received an empty response from the API 'clear_mac_address_table'. This indicates a failure to clear
+ the Mac address table for the interface '{0}'""".format(interface_name)
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ return self
+
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+
+ if execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "Failed to clear the Mac address table for the interface '{0}' due to {1}".format(interface_name, failure_reason)
+ else:
+ self.msg = "Failed to clear the Mac address table for the interface '{0}'".format(interface_name)
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ break
+ elif 'clear mac address-table' in execution_details.get("data"):
+ self.status = "success"
+ self.result['changed'] = True
+ self.result['response'] = execution_details
+ self.msg = "Successfully executed the task of clearing the Mac address table for interface '{0}'".format(interface_name)
+ self.log(self.msg, "INFO")
+ break
+
+ except Exception as e:
+ error_msg = """An exception occurred during the process of clearing the MAC address table for interface {0}, due to -
+ {1}""".format(interface_name, str(e))
+ self.log(error_msg, "WARNING")
+ self.result['changed'] = False
+ self.result['response'] = error_msg
+
+ return self
+
+    def update_interface_detail_of_device(self, device_to_update):
+        """
+        Update interface details for a device in Cisco Catalyst Center.
+        Parameters:
+            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+            device_to_update (list): A list of IP addresses of devices to be updated.
+        Returns:
+            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+        Description:
+            This method updates interface details for devices in Cisco Catalyst Center.
+            It iterates over the list of devices to be updated, retrieves interface parameters from the configuration,
+            calls the update interface details API with the required parameters, and checks the execution response.
+            If the update is successful, it sets the status to 'success' and logs an informational message.
+        """
+
+        # Call the Get interface details by device IP API and fetch the interface Id
+        for device_ip in device_to_update:
+            interface_params = self.config[0].get('update_interface_details')
+            interface_names_list = interface_params.get('interface_name')
+            for interface_name in interface_names_list:
+                device_id = self.get_device_ids([device_ip])
+                interface_id = self.get_interface_from_id_and_name(device_id[0], interface_name)
+                # get_interface_from_id_and_name sets self.status on failure; bail out here if so.
+                self.check_return_status()
+
+                # Now we call update interface details api with required parameter
+                try:
+                    interface_params = self.config[0].get('update_interface_details')
+                    clear_mac_address_table = interface_params.get("clear_mac_address_table", False)
+
+                    if clear_mac_address_table:
+                        response = self.get_device_response(device_ip)
+
+                        # Clearing the MAC table is restricted to devices with the ACCESS role.
+                        if response.get('role').upper() != "ACCESS":
+                            self.msg = "The action to clear the MAC Address table is only supported for devices with the ACCESS role."
+                            self.log(self.msg, "WARNING")
+                            self.result['response'] = self.msg
+                        else:
+                            deploy_mode = interface_params.get('deployment_mode', 'Deploy')
+                            self.clear_mac_address(interface_id, deploy_mode, interface_name)
+                            self.check_return_status()
+
+                    # Build the update payload from only the parameters the playbook supplied.
+                    temp_params = {
+                        'description': interface_params.get('description', ''),
+                        'adminStatus': interface_params.get('admin_status'),
+                        'voiceVlanId': interface_params.get('voice_vlan_id'),
+                        'vlanId': interface_params.get('vlan_id')
+                    }
+                    payload_params = {}
+                    for key, value in temp_params.items():
+                        if value is not None:
+                            payload_params[key] = value
+
+                    update_interface_params = {
+                        'payload': payload_params,
+                        'interface_uuid': interface_id,
+                        'deployment_mode': interface_params.get('deployment_mode', 'Deploy')
+                    }
+                    response = self.dnac._exec(
+                        family="devices",
+                        function='update_interface_details',
+                        op_modifies=True,
+                        params=update_interface_params,
+                    )
+                    self.log("Received API response from 'update_interface_details': {0}".format(str(response)), "DEBUG")
+
+                    if response and isinstance(response, dict):
+                        task_id = response.get('response').get('taskId')
+
+                        # Poll the task until its progress reports SUCCESS or an error is flagged.
+                        while True:
+                            execution_details = self.get_task_details(task_id)
+
+                            if 'SUCCESS' in execution_details.get("progress"):
+                                self.status = "success"
+                                self.result['changed'] = True
+                                self.result['response'] = execution_details
+                                self.msg = "Updated Interface Details for device '{0}' successfully".format(device_ip)
+                                self.log(self.msg, "INFO")
+                                break
+                            elif execution_details.get("isError"):
+                                self.status = "failed"
+                                failure_reason = execution_details.get("failureReason")
+                                if failure_reason:
+                                    self.msg = "Interface Updation get failed because of {0}".format(failure_reason)
+                                else:
+                                    self.msg = "Interface Updation get failed"
+                                self.log(self.msg, "ERROR")
+                                break
+
+                except Exception as e:
+                    # NOTE(review): any exception here is deliberately reported as success with
+                    # changed=False, on the assumption that the update was simply not applicable
+                    # (e.g. non-access port) -- confirm this is the intended behaviour.
+                    error_message = "Error while updating interface details in Cisco Catalyst Center: {0}".format(str(e))
+                    self.log(error_message, "INFO")
+                    self.status = "success"
+                    self.result['changed'] = False
+                    self.msg = "Port actions are only supported on user facing/access ports as it's not allowed or No Updation required"
+                    self.log(self.msg, "INFO")
+
+        return self
+
+ def check_managementip_execution_response(self, response, device_ip, new_mgmt_ipaddress):
+ """
+ Check the execution response of a management IP update task.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ response (dict): The response received after initiating the management IP update task.
+ device_ip (str): The IP address of the device for which the management IP was updated.
+ new_mgmt_ipaddress (str): The new management IP address of the device.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the execution response of a management IP update task in Cisco Catalyst Center.
+ It continuously queries the task details until the task is completed or an error occurs.
+ If the task is successful, it sets the status to 'success' and logs an informational message.
+ If the task fails, it sets the status to 'failed' and logs an error message with the failure reason, if available.
+ """
+
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+ if execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "Device new management IP updation for device '{0}' get failed due to {1}".format(device_ip, failure_reason)
+ else:
+ self.msg = "Device new management IP updation for device '{0}' get failed".format(device_ip)
+ self.log(self.msg, "ERROR")
+ break
+ elif execution_details.get("endTime"):
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = """Device '{0}' present in Cisco Catalyst Center and new management ip '{1}' have been
+ updated successfully""".format(device_ip, new_mgmt_ipaddress)
+ self.result['response'] = self.msg
+ self.log(self.msg, "INFO")
+ break
+
+ return self
+
+ def check_device_update_execution_response(self, response, device_ip):
+ """
+ Check the execution response of a device update task.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ response (dict): The response received after initiating the device update task.
+ device_ip (str): The IP address of the device for which the update is performed.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the execution response of a device update task in Cisco Catalyst Center.
+ It continuously queries the task details until the task is completed or an error occurs.
+ If the task is successful, it sets the status to 'success' and logs an informational message.
+ If the task fails, it sets the status to 'failed' and logs an error message with the failure reason, if available.
+ """
+
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+
+ if execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "Device Updation for device '{0}' get failed due to {1}".format(device_ip, failure_reason)
+ else:
+ self.msg = "Device Updation for device '{0}' get failed".format(device_ip)
+ self.log(self.msg, "ERROR")
+ break
+ elif execution_details.get("endTime"):
+ self.status = "success"
+ self.result['changed'] = True
+ self.result['response'] = execution_details
+ self.msg = "Device '{0}' present in Cisco Catalyst Center and have been updated successfully".format(device_ip)
+ self.log(self.msg, "INFO")
+ break
+
+ return self
+
+ def is_device_exist_in_ccc(self, device_ip):
+ """
+ Check if a device with the given IP exists in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ device_ip (str): The IP address of the device to check.
+ Returns:
+ bool: True if the device exists, False otherwise.
+ Description:
+ This method queries Cisco Catalyst Center to check if a device with the specified
+ management IP address exists. If the device exists, it returns True; otherwise,
+ it returns False. If an error occurs during the process, it logs an error message
+ and raises an exception.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params={"managementIpAddress": device_ip}
+ )
+ response = response.get('response')
+ if not response:
+ self.log("Device with given IP '{0}' is not present in Cisco Catalyst Center".format(device_ip), "INFO")
+ return False
+
+ return True
+
+ except Exception as e:
+ error_message = "Error while getting the response of device '{0}' from Cisco Catalyst Center: {1}".format(device_ip, str(e))
+ self.log(error_message, "ERROR")
+ raise Exception(error_message)
+
+ def is_device_exist_for_update(self, device_to_update):
+ """
+ Check if the device(s) exist in Cisco Catalyst Center for update operation.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ device_to_update (list): A list of device(s) to be be checked present in Cisco Catalyst Center.
+ Returns:
+ bool: True if at least one of the devices to be updated exists in Cisco Catalyst Center,
+ False otherwise.
+ Description:
+ This function checks if any of the devices specified in the 'device_to_update' list
+ exists in Cisco Catalyst Center. It iterates through the list of devices and compares
+ each device with the list of devices present in Cisco Catalyst Center obtained from
+ 'self.have.get("device_in_ccc")'. If a match is found, it sets 'device_exist' to True
+ and breaks the loop.
+ """
+
+ # First check if device present in Cisco Catalyst Center or not
+ device_exist = False
+ for device in device_to_update:
+ if device in self.have.get("device_in_ccc"):
+ device_exist = True
+ break
+
+ return device_exist
+
+ def get_want(self, config):
+ """
+ Get all the device related information from playbook that is needed to be
+ add/update/delete/resync device in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): A dictionary containing device-related information from the playbook.
+ Returns:
+ dict: A dictionary containing the extracted device parameters and other relevant information.
+ Description:
+ Retrieve all the device-related information from the playbook needed for adding, updating, deleting,
+ or resyncing devices in Cisco Catalyst Center.
+ """
+
+ want = {}
+ device_params = self.get_device_params(config)
+ want["device_params"] = device_params
+
+ self.want = want
+ self.msg = "Successfully collected all parameters from the playbook "
+ self.status = "success"
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ return self
+
+ def get_diff_merged(self, config):
+ """
+ Merge and process differences between existing devices and desired device configuration in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): A dictionary containing the desired device configuration and relevant information from the playbook.
+ Returns:
+ object: An instance of the class with updated results and status based on the processing of differences.
+ Description:
+ The function processes the differences and, depending on the changes required, it may add, update,
+ or resynchronize devices in Cisco Catalyst Center.
+ The updated results and status are stored in the class instance for further use.
+ """
+ devices_to_add = self.have["device_not_in_ccc"]
+ device_type = self.config[0].get("type", "NETWORK_DEVICE")
+ device_resynced = self.config[0].get("device_resync", False)
+ device_reboot = self.config[0].get("reboot_device", False)
+ credential_update = self.config[0].get("credential_update", False)
+
+ config['type'] = device_type
+ if device_type == "FIREPOWER_MANAGEMENT_SYSTEM":
+ config['http_port'] = self.config[0].get("http_port", "443")
+
+ config['ip_address_list'] = devices_to_add
+
+ if self.config[0].get('update_mgmt_ipaddresslist'):
+ device_ip = self.config[0].get('update_mgmt_ipaddresslist')[0].get('existMgmtIpAddress')
+ is_device_exists = self.is_device_exist_in_ccc(device_ip)
+
+ if not is_device_exists:
+ self.status = "failed"
+ self.msg = """Unable to update the Management IP address because the device with IP '{0}' is not
+ found in Cisco Catalyst Center.""".format(device_ip)
+ self.log(self.msg, "ERROR")
+ return self
+
+ if self.config[0].get('update_interface_details'):
+ device_to_update = self.get_device_ips_from_config_priority()
+ device_exist = self.is_device_exist_for_update(device_to_update)
+
+ if not device_exist:
+ self.msg = """Unable to update interface details because the device(s) listed: {0} are not present in the
+ Cisco Catalyst Center.""".format(str(device_to_update))
+ self.status = "failed"
+ self.result['response'] = self.msg
+ self.log(self.msg, "ERROR")
+ return self
+
+ if self.config[0].get('role'):
+ devices_to_update_role = self.get_device_ips_from_config_priority()
+ device_exist = self.is_device_exist_for_update(devices_to_update_role)
+
+ if not device_exist:
+ self.msg = """Unable to update device role because the device(s) listed: {0} are not present in the Cisco
+ Catalyst Center.""".format(str(devices_to_update_role))
+ self.status = "failed"
+ self.result['response'] = self.msg
+ self.log(self.msg, "ERROR")
+ return self
+
+ if credential_update:
+ device_to_update = self.get_device_ips_from_config_priority()
+ device_exist = self.is_device_exist_for_update(device_to_update)
+
+ if not device_exist:
+ self.msg = """Unable to edit device credentials/details because the device(s) listed: {0} are not present in the
+ Cisco Catalyst Center.""".format(str(device_to_update))
+ self.status = "failed"
+ self.result['response'] = self.msg
+ self.log(self.msg, "ERROR")
+ return self
+
+ if not config['ip_address_list']:
+ self.msg = "Devices '{0}' already present in Cisco Catalyst Center".format(self.have['devices_in_playbook'])
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ else:
+ # To add the devices in inventory
+ input_params = self.want.get("device_params")
+ device_params = input_params.copy()
+
+ if not device_params['snmpVersion']:
+ device_params['snmpVersion'] = "v3"
+ device_params['ipAddress'] = config['ip_address_list']
+
+ if device_params['snmpVersion'] == "v2":
+ params_to_remove = ["snmpAuthPassphrase", "snmpAuthProtocol", "snmpMode", "snmpPrivPassphrase", "snmpPrivProtocol", "snmpUserName"]
+ for param in params_to_remove:
+ device_params.pop(param, None)
+
+ if not device_params['snmpROCommunity']:
+ self.status = "failed"
+ self.msg = "Required parameter 'snmpROCommunity' for adding device with snmmp version v2 is not present"
+ self.result['msg'] = self.msg
+ self.log(self.msg, "ERROR")
+ return self
+ else:
+ if not device_params['snmpMode']:
+ device_params['snmpMode'] = "AUTHPRIV"
+
+ if not device_params['cliTransport']:
+ device_params['cliTransport'] = "ssh"
+
+ if not device_params['snmpPrivProtocol']:
+ device_params['snmpPrivProtocol'] = "AES128"
+
+ if device_params['snmpPrivProtocol'] == "AES192":
+ device_params['snmpPrivProtocol'] = "CISCOAES192"
+ elif device_params['snmpPrivProtocol'] == "AES256":
+ device_params['snmpPrivProtocol'] = "CISCOAES256"
+
+ if device_params['snmpMode'] == "NOAUTHNOPRIV":
+ device_params.pop('snmpAuthPassphrase', None)
+ device_params.pop('snmpPrivPassphrase', None)
+ device_params.pop('snmpPrivProtocol', None)
+ device_params.pop('snmpAuthProtocol', None)
+ elif device_params['snmpMode'] == "AUTHNOPRIV":
+ device_params.pop('snmpPrivPassphrase', None)
+ device_params.pop('snmpPrivProtocol', None)
+
+ self.mandatory_parameter().check_return_status()
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='add_device',
+ op_modifies=True,
+ params=device_params,
+ )
+ self.log("Received API response from 'add_device': {0}".format(str(response)), "DEBUG")
+
+ if response and isinstance(response, dict):
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+
+ if '/task/' in execution_details.get("progress"):
+ self.status = "success"
+ self.result['response'] = execution_details
+
+ if len(devices_to_add) > 0:
+ self.result['changed'] = True
+ self.msg = "Device(s) '{0}' added to Cisco Catalyst Center".format(str(devices_to_add))
+ self.log(self.msg, "INFO")
+ self.result['msg'] = self.msg
+ break
+ self.msg = "Device(s) '{0}' already present in Cisco Catalyst Center".format(str(self.config[0].get("ip_address_list")))
+ self.log(self.msg, "INFO")
+ self.result['msg'] = self.msg
+ break
+ elif execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "Device addition get failed because of {0}".format(failure_reason)
+ else:
+ self.msg = "Device addition get failed"
+ self.log(self.msg, "ERROR")
+ self.result['msg'] = self.msg
+ break
+
+ except Exception as e:
+ error_message = "Error while adding device in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "ERROR")
+ raise Exception(error_message)
+
+ # Update the role of devices having the role source as Manual
+ if self.config[0].get('role'):
+ devices_to_update_role = self.get_device_ips_from_config_priority()
+ device_role = self.config[0].get('role')
+ role_update_count = 0
+ for device_ip in devices_to_update_role:
+ device_id = self.get_device_ids([device_ip])
+
+ # Check if the same role of device is present in dnac then no need to change the state
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params={"managementIpAddress": device_ip}
+ )
+ response = response.get('response')[0]
+
+ if response.get('role') == device_role:
+ self.status = "success"
+ self.result['changed'] = False
+ role_update_count += 1
+ log_msg = "The device role '{0}' is already set in Cisco Catalyst Center, no update is needed.".format(device_role)
+ self.log(log_msg, "INFO")
+ continue
+
+ device_role_params = {
+ 'role': device_role,
+ 'roleSource': "MANUAL",
+ 'id': device_id[0]
+ }
+
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='update_device_role',
+ op_modifies=True,
+ params=device_role_params,
+ )
+ self.log("Received API response from 'update_device_role': {0}".format(str(response)), "DEBUG")
+
+ if response and isinstance(response, dict):
+ task_id = response.get('response').get('taskId')
+
+ while True:
+ execution_details = self.get_task_details(task_id)
+ progress = execution_details.get("progress")
+
+ if 'successfully' in progress or 'succesfully' in progress:
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Device(s) '{0}' role updated successfully to '{1}'".format(str(devices_to_update_role), device_role)
+ self.result['response'] = self.msg
+ self.log(self.msg, "INFO")
+ break
+ elif execution_details.get("isError"):
+ self.status = "failed"
+ failure_reason = execution_details.get("failureReason")
+ if failure_reason:
+ self.msg = "Device role updation get failed because of {0}".format(failure_reason)
+ else:
+ self.msg = "Device role updation get failed"
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ break
+
+ except Exception as e:
+ error_message = "Error while updating device role '{0}' in Cisco Catalyst Center: {1}".format(device_role, str(e))
+ self.log(error_message, "ERROR")
+
+ if role_update_count == len(devices_to_update_role):
+ self.status = "success"
+ self.result['changed'] = False
+ self.msg = """The device role '{0}' is already set in Cisco Catalyst Center, no device role update is needed for the
+ devices {1}.""".format(device_role, str(devices_to_update_role))
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+
+ if credential_update:
+ device_to_update = self.get_device_ips_from_config_priority()
+
+ # Update Device details and credentails
+ device_uuids = self.get_device_ids(device_to_update)
+ password = "Testing@123"
+ export_payload = {"deviceUuids": device_uuids, "password": password, "operationEnum": "0"}
+ export_response = self.trigger_export_api(export_payload)
+ self.check_return_status()
+ csv_reader = self.decrypt_and_read_csv(export_response, password)
+ self.check_return_status()
+ device_details = {}
+
+ for row in csv_reader:
+ ip_address = row['ip_address']
+ device_details[ip_address] = row
+
+ for device_ip in device_to_update:
+ playbook_params = self.want.get("device_params").copy()
+ playbook_params['ipAddress'] = [device_ip]
+ device_data = device_details[device_ip]
+ if device_data['snmpv3_privacy_password'] == ' ':
+ device_data['snmpv3_privacy_password'] = None
+ if device_data['snmpv3_auth_password'] == ' ':
+ device_data['snmpv3_auth_password'] = None
+
+ if not playbook_params['snmpMode']:
+ if device_data['snmpv3_privacy_password']:
+ playbook_params['snmpMode'] = "AUTHPRIV"
+ elif device_data['snmpv3_auth_password']:
+ playbook_params['snmpMode'] = "AUTHNOPRIV"
+ else:
+ playbook_params['snmpMode'] = "NOAUTHNOPRIV"
+
+ if not playbook_params['cliTransport']:
+ if device_data['protocol'] == "ssh2":
+ playbook_params['cliTransport'] = "ssh"
+ else:
+ playbook_params['cliTransport'] = device_data['protocol']
+ if not playbook_params['snmpPrivProtocol']:
+ playbook_params['snmpPrivProtocol'] = device_data['snmpv3_privacy_type']
+
+ csv_data_dict = {
+ 'username': device_data['cli_username'],
+ 'password': device_data['cli_password'],
+ 'enable_password': device_data['cli_enable_password'],
+ 'netconf_port': device_data['netconf_port'],
+ }
+
+ if device_data['snmp_version'] == '3':
+ csv_data_dict['snmp_username'] = device_data['snmpv3_user_name']
+ if device_data['snmpv3_privacy_password']:
+ csv_data_dict['snmp_auth_passphrase'] = device_data['snmpv3_auth_password']
+ csv_data_dict['snmp_priv_passphrase'] = device_data['snmpv3_privacy_password']
+ else:
+ csv_data_dict['snmp_username'] = None
+
+ device_key_mapping = {
+ 'username': 'userName',
+ 'password': 'password',
+ 'enable_password': 'enablePassword',
+ 'snmp_username': 'snmpUserName',
+ 'netconf_port': 'netconfPort'
+ }
+ device_update_key_list = ["username", "password", "enable_password", "snmp_username", "netconf_port"]
+
+ for key in device_update_key_list:
+ mapped_key = device_key_mapping[key]
+
+ if playbook_params[mapped_key] is None:
+ playbook_params[mapped_key] = csv_data_dict[key]
+
+ if playbook_params['snmpMode'] == "AUTHPRIV":
+ if not playbook_params['snmpAuthPassphrase']:
+ playbook_params['snmpAuthPassphrase'] = csv_data_dict['snmp_auth_passphrase']
+ if not playbook_params['snmpPrivPassphrase']:
+ playbook_params['snmpPrivPassphrase'] = csv_data_dict['snmp_priv_passphrase']
+
+ if playbook_params['snmpPrivProtocol'] == "AES192":
+ playbook_params['snmpPrivProtocol'] = "CISCOAES192"
+ elif playbook_params['snmpPrivProtocol'] == "AES256":
+ playbook_params['snmpPrivProtocol'] = "CISCOAES256"
+
+ if playbook_params['snmpMode'] == "NOAUTHNOPRIV":
+ playbook_params.pop('snmpAuthPassphrase', None)
+ playbook_params.pop('snmpPrivPassphrase', None)
+ playbook_params.pop('snmpPrivProtocol', None)
+ playbook_params.pop('snmpAuthProtocol', None)
+ elif playbook_params['snmpMode'] == "AUTHNOPRIV":
+ playbook_params.pop('snmpPrivPassphrase', None)
+ playbook_params.pop('snmpPrivProtocol', None)
+
+ if playbook_params['netconfPort'] == " ":
+ playbook_params['netconfPort'] = None
+
+ if playbook_params['enablePassword'] == " ":
+ playbook_params['enablePassword'] = None
+
+ if playbook_params['netconfPort'] and playbook_params['cliTransport'] == "telnet":
+ self.log("""Updating the device cli transport from ssh to telnet with netconf port '{0}' so make
+ netconf port as None to perform the device update task""".format(playbook_params['netconfPort']), "DEBUG")
+ playbook_params['netconfPort'] = None
+
+ if not playbook_params['snmpVersion']:
+ if device_data['snmp_version'] == '3':
+ playbook_params['snmpVersion'] = "v3"
+ else:
+ playbook_params['snmpVersion'] = "v2"
+
+ if playbook_params['snmpVersion'] == 'v2':
+ params_to_remove = ["snmpAuthPassphrase", "snmpAuthProtocol", "snmpMode", "snmpPrivPassphrase", "snmpPrivProtocol", "snmpUserName"]
+ for param in params_to_remove:
+ playbook_params.pop(param, None)
+
+ if not playbook_params['snmpROCommunity']:
+ playbook_params['snmpROCommunity'] = device_data.get('snmp_community', None)
+
+ try:
+ if playbook_params['updateMgmtIPaddressList']:
+ new_mgmt_ipaddress = playbook_params['updateMgmtIPaddressList'][0]['newMgmtIpAddress']
+ if new_mgmt_ipaddress in self.have['device_in_ccc']:
+ self.status = "failed"
+ self.msg = "Device with IP address '{0}' already exists in inventory".format(new_mgmt_ipaddress)
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ else:
+ self.log("Playbook parameter for updating device new management ip address: {0}".format(str(playbook_params)), "DEBUG")
+ response = self.dnac._exec(
+ family="devices",
+ function='sync_devices',
+ op_modifies=True,
+ params=playbook_params,
+ )
+ self.log("Received API response from 'sync_devices': {0}".format(str(response)), "DEBUG")
+
+ if response and isinstance(response, dict):
+ self.check_managementip_execution_response(response, device_ip, new_mgmt_ipaddress)
+ self.check_return_status()
+
+ else:
+ self.log("Playbook parameter for updating devices: {0}".format(str(playbook_params)), "DEBUG")
+ response = self.dnac._exec(
+ family="devices",
+ function='sync_devices',
+ op_modifies=True,
+ params=playbook_params,
+ )
+ self.log("Received API response from 'sync_devices': {0}".format(str(response)), "DEBUG")
+
+ if response and isinstance(response, dict):
+ self.check_device_update_execution_response(response, device_ip)
+ self.check_return_status()
+
+ except Exception as e:
+ error_message = "Error while updating device in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "ERROR")
+ raise Exception(error_message)
+
+ # Update list of interface details on specific or list of devices.
+ if self.config[0].get('update_interface_details'):
+ device_to_update = self.get_device_ips_from_config_priority()
+ self.update_interface_detail_of_device(device_to_update).check_return_status()
+
+ # If User defined field(UDF) not present then create it and add multiple udf to specific or list of devices
+ if self.config[0].get('add_user_defined_field'):
+ udf_field_list = self.config[0].get('add_user_defined_field')
+
+ for udf in udf_field_list:
+ field_name = udf.get('name')
+
+ if field_name is None:
+ self.status = "failed"
+ self.msg = "Error: The mandatory parameter 'name' for the User Defined Field is missing. Please provide the required information."
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ return self
+
+ # Check if the Global User defined field exist if not then create it with given field name
+ udf_exist = self.is_udf_exist(field_name)
+
+ if not udf_exist:
+ # Create the Global UDF
+ self.log("Global User Defined Field '{0}' does not present in Cisco Catalyst Center, we need to create it".format(field_name), "DEBUG")
+ self.create_user_defined_field(udf).check_return_status()
+
+ # Get device Id based on config priority
+ device_ips = self.get_device_ips_from_config_priority()
+ device_ids = self.get_device_ids(device_ips)
+
+ if not device_ids:
+ self.status = "failed"
+ self.msg = """Unable to assign Global User Defined Field: No devices found in Cisco Catalyst Center.
+ Please add devices to proceed."""
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ self.log(self.msg, "INFO")
+ return self
+
+ # Now add code for adding Global UDF to device with Id
+ self.add_field_to_devices(device_ids, udf).check_return_status()
+
+ self.result['changed'] = True
+ self.msg = "Global User Defined Field(UDF) named '{0}' has been successfully added to the device.".format(field_name)
+ self.log(self.msg, "INFO")
+
+ # Once Wired device get added we will assign device to site and Provisioned it
+ if self.config[0].get('provision_wired_device'):
+ self.provisioned_wired_device().check_return_status()
+
+ # Once Wireless device get added we will assign device to site and Provisioned it
+ # Defer this feature as API issue is there once it's fixed we will addresses it in upcoming release iac2.0
+ if support_for_provisioning_wireless:
+ if self.config[0].get('provision_wireless_device'):
+ self.provisioned_wireless_devices().check_return_status()
+
+ if device_resynced:
+ self.resync_devices().check_return_status()
+
+ if device_reboot:
+ self.reboot_access_points().check_return_status()
+
+ if self.config[0].get('export_device_list'):
+ self.export_device_details().check_return_status()
+
+ return self
+
+    def get_diff_deleted(self, config):
+        """
+        Delete devices in Cisco Catalyst Center based on device IP Address.
+        Parameters:
+            self (object): An instance of a class used for interacting with Cisco Catalyst Center
+            config (dict): A dictionary containing the list of device IP addresses to be deleted.
+        Returns:
+            object: An instance of the class with updated results and status based on the deletion operation.
+        Description:
+            This function is responsible for removing devices from the Cisco Catalyst Center inventory. It also
+            unprovisions and removes provisioned wired devices from the Inventory page, and deletes
+            the Global User Defined Fields (UDF) that are associated with the devices.
+        """
+
+        device_to_delete = self.get_device_ips_from_config_priority()
+        # 'msg' is collected as a list so a per-device outcome can be appended below.
+        self.result['msg'] = []
+
+        # When Global UDFs are present in the playbook, only the UDFs are deleted.
+        # NOTE(review): the early 'return self' after this branch skips the device
+        # deletion loop below whenever 'add_user_defined_field' is set -- confirm intended.
+        if self.config[0].get('add_user_defined_field'):
+            udf_field_list = self.config[0].get('add_user_defined_field')
+            for udf in udf_field_list:
+                field_name = udf.get('name')
+                udf_id = self.get_udf_id(field_name)
+
+                if udf_id is None:
+                    # An absent UDF is treated as an idempotent (already deleted) success.
+                    self.status = "success"
+                    self.msg = "Global UDF '{0}' is not present in Cisco Catalyst Center".format(field_name)
+                    self.log(self.msg, "INFO")
+                    self.result['changed'] = False
+                    self.result['msg'] = self.msg
+                    return self
+
+                try:
+                    response = self.dnac._exec(
+                        family="devices",
+                        function='delete_user_defined_field',
+                        params={"id": udf_id},
+                    )
+                    if response and isinstance(response, dict):
+                        self.log("Received API response from 'delete_user_defined_field': {0}".format(str(response)), "DEBUG")
+                        task_id = response.get('response').get('taskId')
+
+                        # Poll the asynchronous deletion task until it succeeds or errors out.
+                        while True:
+                            execution_details = self.get_task_details(task_id)
+
+                            if 'success' in execution_details.get("progress"):
+                                self.status = "success"
+                                self.msg = "Global UDF '{0}' deleted successfully from Cisco Catalyst Center".format(field_name)
+                                self.log(self.msg, "INFO")
+                                self.result['changed'] = True
+                                self.result['response'] = execution_details
+                                break
+                            elif execution_details.get("isError"):
+                                self.status = "failed"
+                                failure_reason = execution_details.get("failureReason")
+                                if failure_reason:
+                                    self.msg = "Failed to delete Global User Defined Field(UDF) due to: {0}".format(failure_reason)
+                                else:
+                                    self.msg = "Global UDF deletion get failed."
+                                self.log(self.msg, "ERROR")
+                                break
+
+                except Exception as e:
+                    error_message = "Error while deleting Global UDF from Cisco Catalyst Center: {0}".format(str(e))
+                    self.log(error_message, "ERROR")
+                    raise Exception(error_message)
+
+            return self
+
+        for device_ip in device_to_delete:
+            # Deleting a device that is not present is reported as an unchanged success.
+            if device_ip not in self.have.get("device_in_ccc"):
+                self.status = "success"
+                self.result['changed'] = False
+                self.msg = "Device '{0}' is not present in Cisco Catalyst Center so can't perform delete operation".format(device_ip)
+                self.result['msg'].append(self.msg)
+                self.result['response'] = self.msg
+                self.log(self.msg, "INFO")
+                continue
+
+            try:
+                provision_params = {
+                    "device_management_ip_address": device_ip
+                }
+                # NOTE(review): 'prov_respone' is a typo of 'prov_response' (local name only).
+                prov_respone = self.dnac._exec(
+                    family="sda",
+                    function='get_provisioned_wired_device',
+                    params=provision_params,
+                )
+
+                if prov_respone.get("status") == "success":
+                    # The device is a provisioned wired device: unprovision it first.
+                    response = self.dnac._exec(
+                        family="sda",
+                        function='delete_provisioned_wired_device',
+                        params=provision_params,
+                    )
+                    executionid = response.get("executionId")
+
+                    # Poll the business API execution until it succeeds or errors out.
+                    while True:
+                        execution_details = self.get_execution_details(executionid)
+                        if execution_details.get("status") == "SUCCESS":
+                            self.result['changed'] = True
+                            self.msg = execution_details.get("bapiName")
+                            self.log(self.msg, "INFO")
+                            self.result['response'] = self.msg
+                            self.result['msg'].append(self.msg)
+                            break
+                        elif execution_details.get("bapiError"):
+                            self.msg = execution_details.get("bapiError")
+                            self.result['msg'].append(self.msg)
+                            self.log(self.msg, "ERROR")
+                            break
+            except Exception as e:
+                # The provisioning lookup raised (device not provisioned, or the
+                # lookup failed): fall back to deleting the device directly from
+                # the inventory by its id.
+                device_id = self.get_device_ids([device_ip])
+                delete_params = {
+                    "id": device_id[0],
+                    "clean_config": self.config[0].get("clean_config", False)
+                }
+                response = self.dnac._exec(
+                    family="devices",
+                    function='delete_device_by_id',
+                    params=delete_params,
+                )
+
+                if response and isinstance(response, dict):
+                    task_id = response.get('response').get('taskId')
+
+                    # Poll the asynchronous deletion task until it succeeds or errors out.
+                    while True:
+                        execution_details = self.get_task_details(task_id)
+
+                        if 'success' in execution_details.get("progress"):
+                            self.status = "success"
+                            self.msg = "Device '{0}' was successfully deleted from Cisco Catalyst Center".format(device_ip)
+                            self.log(self.msg, "INFO")
+                            self.result['changed'] = True
+                            self.result['response'] = execution_details
+                            break
+                        elif execution_details.get("isError"):
+                            self.status = "failed"
+                            failure_reason = execution_details.get("failureReason")
+                            if failure_reason:
+                                self.msg = "Device '{0}' deletion get failed due to: {1}".format(device_ip, failure_reason)
+                            else:
+                                self.msg = "Device '{0}' deletion get failed.".format(device_ip)
+                            self.log(self.msg, "ERROR")
+                            break
+                self.result['msg'].append(self.msg)
+
+        return self
+
+    def verify_diff_merged(self, config):
+        """
+        Verify the merged status(Addition/Updation) of Devices in Cisco Catalyst Center.
+        Parameters:
+            - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+            - config (dict): The configuration details to be verified.
+        Return:
+            - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+        Description:
+            This method checks the merged status of a configuration in Cisco Catalyst Center by retrieving the current state
+            (have) and desired state (want) of the configuration, logs the states, and validates whether the specified
+            site exists in the Catalyst Center configuration.
+
+            The function performs the following verifications:
+            - Checks for devices added to Cisco Catalyst Center and logs the status.
+            - Verifies updated device roles and logs the status.
+            - Verifies updated interface details and logs the status.
+            - Verifies updated device credentials and logs the status.
+            - Verifies the creation of a global User Defined Field (UDF) and logs the status.
+            - Verifies the provisioning of wired devices and logs the status.
+        """
+
+        # Refresh current state from Catalyst Center so the checks below compare
+        # against the post-merge reality, not the pre-merge snapshot.
+        self.get_have(config)
+        self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+        self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+        devices_to_add = self.have["device_not_in_ccc"]
+        credential_update = self.config[0].get("credential_update", False)
+        device_type = self.config[0].get("type", "NETWORK_DEVICE")
+        device_ips = self.get_device_ips_from_config_priority()
+
+        # 1) Device addition: after a successful merge no requested device
+        #    should remain absent from Catalyst Center.
+        if not devices_to_add:
+            self.status = "success"
+            msg = """Requested device(s) '{0}' have been successfully added to the Cisco Catalyst Center and their
+                    addition has been verified.""".format(str(self.have['devices_in_playbook']))
+            self.log(msg, "INFO")
+        else:
+            self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that the device addition
+                     task may not have executed successfully.""", "INFO")
+
+        # 2) Interface detail update verification.
+        if self.config[0].get('update_interface_details'):
+            interface_update_flag = True
+            interface_names_list = self.config[0].get('update_interface_details').get('interface_name')
+
+            for device_ip in device_ips:
+                for interface_name in interface_names_list:
+                    if not self.check_interface_details(device_ip, interface_name):
+                        interface_update_flag = False
+                        break
+                # NOTE(review): this 'break' only exits the inner interface loop;
+                # remaining devices are still checked after a mismatch -- confirm intended.
+
+            if interface_update_flag:
+                self.status = "success"
+                msg = "Interface details updated and verified successfully for devices {0}.".format(device_ips)
+                self.log(msg, "INFO")
+            else:
+                self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that the update
+                         interface details task may not have executed successfully.""", "INFO")
+
+        # 3) Credential update verification (only supported for NETWORK_DEVICE).
+        if credential_update and device_type == "NETWORK_DEVICE":
+            credential_update_flag = self.check_credential_update()
+
+            if credential_update_flag:
+                self.status = "success"
+                msg = "Device credentials and details updated and verified successfully in Cisco Catalyst Center."
+                self.log(msg, "INFO")
+            else:
+                self.log("Playbook parameter does not match with Cisco Catalyst Center, meaning device updation task not executed properly.", "INFO")
+        elif device_type != "NETWORK_DEVICE":
+            self.log("""Unable to compare the parameter for device type '{0}' in the playbook with the one in Cisco Catalyst Center."""
+                     .format(device_type), "WARNING")
+
+        # 4) Global UDF creation verification.
+        if self.config[0].get('add_user_defined_field'):
+            udf_field_list = self.config[0].get('add_user_defined_field')
+            for udf in udf_field_list:
+                field_name = udf.get('name')
+                udf_exist = self.is_udf_exist(field_name)
+
+                if udf_exist:
+                    self.status = "success"
+                    msg = "Global UDF {0} created and verified successfully".format(field_name)
+                    self.log(msg, "INFO")
+                else:
+                    self.log("""Mismatch between playbook parameter and Cisco Catalyst Center detected, indicating that
+                             the task of creating Global UDF may not have executed successfully.""", "INFO")
+
+        # 5) Device role update verification.
+        if self.config[0].get('role'):
+            device_role_flag = True
+
+            for device_ip in device_ips:
+                if not self.check_device_role(device_ip):
+                    device_role_flag = False
+                    break
+
+            if device_role_flag:
+                self.status = "success"
+                msg = "Device roles updated and verified successfully."
+                self.log(msg, "INFO")
+            else:
+                self.log("""Mismatch between playbook parameter 'role' and Cisco Catalyst Center detected, indicating the
+                         device role update task may not have executed successfully.""", "INFO")
+
+        # 6) Wired device provisioning verification.
+        if self.config[0].get('provision_wired_device'):
+            provision_wired_list = self.config[0].get('provision_wired_device')
+            provision_wired_flag = True
+            provision_device_list = []
+
+            for prov_dict in provision_wired_list:
+                device_ip = prov_dict['device_ip']
+                provision_device_list.append(device_ip)
+                if not self.get_provision_wired_device(device_ip):
+                    provision_wired_flag = False
+                    break
+
+            if provision_wired_flag:
+                self.status = "success"
+                msg = "Wired devices {0} get provisioned and verified successfully.".format(provision_device_list)
+                self.log(msg, "INFO")
+            else:
+                self.log("""Mismatch between playbook's input and Cisco Catalyst Center detected, indicating that
+                         the provisioning task may not have executed successfully.""", "INFO")
+
+        return self
+
+    def verify_diff_deleted(self, config):
+        """
+        Verify the deletion status of Device and Global UDF in Cisco Catalyst Center.
+        Parameters:
+            - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+            - config (dict): The configuration details to be verified.
+        Return:
+            - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+        Description:
+            This method checks the deletion status of a configuration in Cisco Catalyst Center.
+            It validates whether the specified Devices or Global UDF deleted from Cisco Catalyst Center.
+        """
+
+        # Refresh current state so the verification reflects the post-delete reality.
+        self.get_have(config)
+        self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+        self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+        input_devices = self.have["want_device"]
+        # Devices still present in Catalyst Center after the delete operation.
+        device_in_ccc = self.device_exists_in_ccc()
+
+        if self.config[0].get('add_user_defined_field'):
+            udf_field_list = self.config[0].get('add_user_defined_field')
+            for udf in udf_field_list:
+                field_name = udf.get('name')
+                udf_id = self.get_udf_id(field_name)
+
+                # A UDF that can no longer be resolved to an id has been deleted.
+                if udf_id is None:
+                    self.status = "success"
+                    msg = """Global UDF named '{0}' has been successfully deleted from Cisco Catalyst Center and the deletion
+                            has been verified.""".format(field_name)
+                    self.log(msg, "INFO")
+
+            # NOTE(review): returning here skips the device deletion verification
+            # below whenever UDFs are present in the playbook -- confirm intended.
+            return self
+
+        device_delete_flag = True
+        for device_ip in input_devices:
+            if device_ip in device_in_ccc:
+                # A requested device is still present: deletion did not complete.
+                device_after_deletion = device_ip
+                device_delete_flag = False
+                break
+
+        if device_delete_flag:
+            self.status = "success"
+            self.msg = "Requested device(s) '{0}' deleted from Cisco Catalyst Center and the deletion has been verified.".format(str(input_devices))
+            self.log(self.msg, "INFO")
+        else:
+            self.log("""Mismatch between playbook parameter device({0}) and Cisco Catalyst Center detected, indicating that
+                     the device deletion task may not have executed successfully.""".format(device_after_deletion), "INFO")
+
+        return self
+
+
+def main():
+ """ main entry point for module execution
+ """
+
+ element_spec = {'dnac_host': {'type': 'str', 'required': True, },
+ 'dnac_port': {'type': 'str', 'default': '443'},
+ 'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
+ 'dnac_password': {'type': 'str', 'no_log': True},
+ 'dnac_verify': {'type': 'bool', 'default': 'True'},
+ 'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
+ 'dnac_debug': {'type': 'bool', 'default': False},
+ 'dnac_log_level': {'type': 'str', 'default': 'WARNING'},
+ "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
+ "dnac_log_append": {"type": 'bool', "default": True},
+ 'dnac_log': {'type': 'bool', 'default': False},
+ 'validate_response_schema': {'type': 'bool', 'default': True},
+ 'config_verify': {'type': 'bool', "default": False},
+ 'dnac_api_task_timeout': {'type': 'int', "default": 1200},
+ 'dnac_task_poll_interval': {'type': 'int', "default": 2},
+ 'config': {'required': True, 'type': 'list', 'elements': 'dict'},
+ 'state': {'default': 'merged', 'choices': ['merged', 'deleted']}
+ }
+
+ module = AnsibleModule(argument_spec=element_spec,
+ supports_check_mode=False)
+
+ ccc_device = Inventory(module)
+ state = ccc_device.params.get("state")
+
+ if state not in ccc_device.supported_states:
+ ccc_device.status = "invalid"
+ ccc_device.msg = "State {0} is invalid".format(state)
+ ccc_device.check_return_status()
+
+ ccc_device.validate_input().check_return_status()
+ config_verify = ccc_device.params.get("config_verify")
+
+ for config in ccc_device.validated_config:
+ ccc_device.reset_values()
+ ccc_device.get_want(config).check_return_status()
+ ccc_device.get_have(config).check_return_status()
+ ccc_device.get_diff_state_apply[state](config).check_return_status()
+ if config_verify:
+ ccc_device.verify_diff_state_apply[state](config).check_return_status()
+
+ module.exit_json(**ccc_device.result)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_settings_intent.py b/ansible_collections/cisco/dnac/plugins/modules/network_settings_intent.py
new file mode 100644
index 000000000..49d6fa5d4
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_settings_intent.py
@@ -0,0 +1,2225 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+"""Ansible module to perform operations on global pool, reserve pool and network in DNAC."""
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ['Muthu Rakesh, Madhan Sankaranarayanan']
+
+DOCUMENTATION = r"""
+---
+module: network_settings_intent
+short_description: Resource module for IP Address pools and network functions
+description:
+- Manage operations on Global Pool, Reserve Pool, Network resources.
+- API to create/update/delete global pool.
+- API to reserve/update/delete an ip subpool from the global pool.
+- API to update network settings for DHCP, Syslog, SNMP, NTP, Network AAA, Client and Endpoint AAA,
+ and/or DNS center server settings.
+version_added: '6.6.0'
+extends_documentation_fragment:
+ - cisco.dnac.intent_params
+author: Muthu Rakesh (@MUTHU-RAKESH-27)
+ Madhan Sankaranarayanan (@madhansansel)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Cisco Catalyst Center after module completion.
+ type: str
+ choices: [ merged, deleted ]
+ default: merged
+ config:
+ description:
+ - List of details of global pool, reserved pool, network being managed.
+ type: list
+ elements: dict
+ required: true
+ suboptions:
+ global_pool_details:
+ description: Manages IPv4 and IPv6 IP pools in the global level.
+ type: dict
+ suboptions:
+ settings:
+ description: Global Pool's settings.
+ type: dict
+ suboptions:
+ ip_pool:
+ description: Contains a list of global IP pool configurations.
+ elements: dict
+ type: list
+ suboptions:
+ dhcp_server_ips:
+ description: >
+ The DHCP server IPs responsible for automatically assigning IP addresses
+ and network configuration parameters to devices on a local network.
+ elements: str
+ type: list
+ dns_server_ips:
+ description: Responsible for translating domain names into corresponding IP addresses.
+ elements: str
+ type: list
+ gateway:
+ description: Serves as an entry or exit point for data traffic between networks.
+ type: str
+ ip_address_space:
+ description: IP address space either IPv4 or IPv6.
+ type: str
+ cidr:
+ description: >
+ Defines the IP pool's Classless Inter-Domain Routing block,
+ enabling systematic IP address distribution within a network.
+ type: str
+ prev_name:
+ description: >
+ The former identifier for the global pool. It should be used
+ exclusively when you need to update the global pool's name.
+ type: str
+ name:
+ description: Specifies the name assigned to the Global IP Pool.
+ type: str
+ pool_type:
+ description: >
+ Includes both the Generic Ip Pool and Tunnel Ip Pool.
+ Generic - Used for general purpose within the network such as device
+ management or communication between the network devices.
+ Tunnel - Designated for the tunnel interfaces to encapsulate packets
+ within the network protocol. It is used in VPN connections,
+ GRE tunnels, or other types of overlay networks.
+ default: Generic
+ choices: [Generic, Tunnel]
+ type: str
+
+ reserve_pool_details:
+ description: Reserved IP subpool details from the global pool.
+ type: dict
+ suboptions:
+ ipv4_dhcp_servers:
+ description: Specifies the IPv4 addresses for DHCP servers, for example, "1.1.1.1".
+ elements: str
+ type: list
+ ipv4_dns_servers:
+ description: Specifies the IPv4 addresses for DNS servers, for example, "4.4.4.4".
+ elements: str
+ type: list
+ ipv4_gateway:
+ description: Provides the gateway's IPv4 address, for example, "175.175.0.1".
+ type: str
+ version_added: 4.0.0
+ ipv4_global_pool:
+ description: IP v4 Global pool address with cidr, example 175.175.0.0/16.
+ type: str
+ ipv4_prefix:
+ description: ip4 prefix length is enabled or ipv4 total Host input is enabled
+ type: bool
+ ipv4_prefix_length:
+ description: The ipv4 prefix length is required when ipv4_prefix value is true.
+ type: int
+ ipv4_subnet:
+ description: Indicates the IPv4 subnet address, for example, "175.175.0.0".
+ type: str
+ ipv4_total_host:
+ description: The total number of hosts for IPv4, required when the 'ipv4_prefix' is set to false.
+ type: int
+ ipv6_address_space:
+ description: >
+ Determines whether both IPv6 and IPv4 inputs are required.
+ If set to false, only IPv4 inputs are required.
+ If set to true, both IPv6 and IPv4 inputs are required.
+ type: bool
+ ipv6_dhcp_servers:
+ description: >
+ Specifies the IPv6 addresses for DHCP servers in the format.
+ For example, "2001:0db8:0123:4567:89ab:cdef:0001:0001".
+ elements: str
+ type: list
+ ipv6_dns_servers:
+ description: >
+ Specifies the IPv6 addresses for DNS servers.
+ For example, "2001:0db8:0123:4567:89ab:cdef:0002:0002".
+ elements: str
+ type: list
+ ipv6_gateway:
+ description: >
+ Provides the gateway's IPv6 address.
+ For example, "2001:0db8:0123:4567:89ab:cdef:0003:0003".
+ type: str
+ ipv6_global_pool:
+ description: >
+ IPv6 Global pool address with cidr this is required when ipv6_address_space
+ value is true, example 2001 db8 85a3 /64.
+ type: str
+ ipv6_prefix:
+ description: >
+ Ipv6 prefix value is true, the ip6 prefix length input field is enabled,
+      if it is false ipv6 total Host input is enabled.
+ type: bool
+ ipv6_prefix_length:
+ description: IPv6 prefix length is required when the ipv6_prefix value is true.
+ type: int
+ ipv6_subnet:
+ description: IPv6 Subnet address, example 2001 db8 85a3 0 100.
+ type: str
+ ipv6_total_host:
+ description: The total number of hosts for IPv6 is required if the 'ipv6_prefix' is set to false.
+ type: int
+ name:
+ description: Name of the reserve IP subpool.
+ type: str
+ prev_name:
+ description: The former name associated with the reserved IP sub-pool.
+ type: str
+ site_name:
+ description: >
+ The name of the site provided as a path parameter, used
+ to specify where the IP sub-pool will be reserved.
+ type: str
+ slaac_support:
+ description: >
+ Allows devices on IPv6 networks to self-configure their
+ IP addresses autonomously, eliminating the need for manual setup.
+ type: bool
+ pool_type:
+ description: Type of the reserve ip sub pool.
+ Generic - Used for general purpose within the network such as device
+ management or communication between the network devices.
+ LAN - Used for the devices and the resources within the Local Area Network
+ such as device connectivity, internal communication, or services.
+ Management - Used for the management purposes such as device management interfaces,
+ management access, or other administrative functions.
+ Service - Used for the network services and application such as DNS (Domain Name System),
+ DHCP (Dynamic Host Configuration Protocol), NTP (Network Time Protocol).
+ WAN - Used for the devices and resources with the Wide Area Network such as remote
+ sites interconnection with other network or services hosted within WAN.
+ default: Generic
+ choices: [Generic, LAN, Management, Service, WAN]
+ type: str
+ network_management_details:
+ description: Set default network settings for the site
+ type: dict
+ suboptions:
+ settings:
+ description: Network management details settings.
+ type: dict
+ suboptions:
+ client_and_endpoint_aaa:
+ description: Network V2's clientAndEndpoint_aaa.
+ suboptions:
+ ip_address:
+                    description: IP address for ISE server (eg 1.1.1.4).
+ type: str
+ network:
+ description: IP address for AAA or ISE server (eg 2.2.2.1).
+ type: str
+ protocol:
+                    description: Protocol for AAA or ISE server (eg RADIUS).
+ type: str
+ servers:
+ description: Server type AAA or ISE server (eg AAA).
+ type: str
+ shared_secret:
+ description: Shared secret for ISE server.
+ type: str
+ type: dict
+ dhcp_server:
+ description: DHCP Server IP (eg 1.1.1.1).
+ elements: str
+ type: list
+ dns_server:
+ description: Network V2's dnsServer.
+ suboptions:
+ domain_name:
+ description: Domain Name of DHCP (eg; cisco).
+ type: str
+ primary_ip_address:
+ description: Primary IP Address for DHCP (eg 2.2.2.2).
+ type: str
+ secondary_ip_address:
+ description: Secondary IP Address for DHCP (eg 3.3.3.3).
+ type: str
+ type: dict
+ message_of_the_day:
+ description: Network V2's messageOfTheday.
+ suboptions:
+ banner_message:
+                    description: Message for Banner message (eg; Good day).
+ type: str
+ retain_existing_banner:
+ description: Retain existing Banner Message (eg "true" or "false").
+ type: str
+ type: dict
+ netflow_collector:
+ description: Network V2's netflowcollector.
+ suboptions:
+ ip_address:
+ description: IP Address for NetFlow collector (eg 3.3.3.1).
+ type: str
+ port:
+ description: Port for NetFlow Collector (eg; 443).
+ type: int
+ type: dict
+ network_aaa:
+ description: Network V2's network_aaa.
+ suboptions:
+ ip_address:
+ description: IP address for AAA and ISE server (eg 1.1.1.1).
+ type: str
+ network:
+ description: IP Address for AAA or ISE server (eg 2.2.2.2).
+ type: str
+ protocol:
+                    description: Protocol for AAA or ISE server (eg RADIUS).
+ type: str
+ servers:
+ description: Server type for AAA Network (eg AAA).
+ type: str
+ shared_secret:
+ description: Shared secret for ISE Server.
+ type: str
+ type: dict
+ ntp_server:
+ description: IP address for NTP server (eg 1.1.1.2).
+ elements: str
+ type: list
+ snmp_server:
+ description: Network V2's snmpServer.
+ suboptions:
+ configure_dnac_ip:
+ description: Configuration Cisco Catalyst Center IP for SNMP Server (eg true).
+ type: bool
+ ip_addresses:
+ description: IP Address for SNMP Server (eg 4.4.4.1).
+ elements: str
+ type: list
+ type: dict
+ syslog_server:
+ description: Network V2's syslogServer.
+ suboptions:
+ configure_dnac_ip:
+ description: Configuration Cisco Catalyst Center IP for syslog server (eg true).
+ type: bool
+ ip_addresses:
+ description: IP Address for syslog server (eg 4.4.4.4).
+ elements: str
+ type: list
+ type: dict
+ timezone:
+ description: Input for time zone (eg Africa/Abidjan).
+ type: str
+ site_name:
+ description: >
+ The name of the site provided as a path parameter, used
+ to specify where the IP sub-pool will be reserved.
+ type: str
+requirements:
+- dnacentersdk == 2.4.5
+- python >= 3.5
+notes:
+ - SDK Method used are
+ network_settings.NetworkSettings.create_global_pool,
+ network_settings.NetworkSettings.delete_global_ip_pool,
+ network_settings.NetworkSettings.update_global_pool,
+ network_settings.NetworkSettings.release_reserve_ip_subpool,
+ network_settings.NetworkSettings.reserve_ip_subpool,
+ network_settings.NetworkSettings.update_reserve_ip_subpool,
+ network_settings.NetworkSettings.update_network_v2,
+
+ - Paths used are
+ post /dna/intent/api/v1/global-pool,
+ delete /dna/intent/api/v1/global-pool/{id},
+ put /dna/intent/api/v1/global-pool,
+ post /dna/intent/api/v1/reserve-ip-subpool/{siteId},
+ delete /dna/intent/api/v1/reserve-ip-subpool/{id},
+ put /dna/intent/api/v1/reserve-ip-subpool/{siteId},
+ put /dna/intent/api/v2/network/{siteId},
+
+"""
+
+EXAMPLES = r"""
+- name: Create global pool, reserve an ip pool and network
+ cisco.dnac.network_settings_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - global_pool_details:
+ settings:
+ ip_pool:
+ - name: string
+ gateway: string
+ ip_address_space: string
+ cidr: string
+ pool_type: Generic
+ dhcp_server_ips: list
+ dns_server_ips: list
+ reserve_pool_details:
+ ipv6_address_space: True
+ ipv4_global_pool: string
+ ipv4_prefix: True
+ ipv4_prefix_length: 9
+ ipv4_subnet: string
+ name: string
+ ipv6_prefix: True
+ ipv6_prefix_length: 64
+ ipv6_global_pool: string
+ ipv6_subnet: string
+ site_name: string
+ slaac_support: True
+ pool_type: LAN
+ network_management_details:
+ settings:
+ dhcp_server: list
+ dns_server:
+ domain_name: string
+ primary_ip_address: string
+ secondary_ip_address: string
+ client_and_endpoint_aaa:
+ network: string
+ protocol: string
+ servers: string
+ message_of_the_day:
+ banner_message: string
+ retain_existing_banner: string
+ netflow_collector:
+ ip_address: string
+ port: 443
+ network_aaa:
+ network: string
+ protocol: string
+ servers: string
+ ntp_server: list
+ snmp_server:
+ configure_dnac_ip: True
+ ip_addresses: list
+ syslog_server:
+ configure_dnac_ip: True
+ ip_addresses: list
+ site_name: string
+"""
+
+RETURN = r"""
+# Case_1: Successful creation/updation/deletion of global pool
+response_1:
+ description: A dictionary or list with the response returned by the Cisco DNA Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "executionId": "string",
+ "executionStatusUrl": "string",
+ "message": "string"
+ }
+
+# Case_2: Successful creation/updation/deletion of reserve pool
+response_2:
+ description: A dictionary or list with the response returned by the Cisco DNA Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "executionId": "string",
+ "executionStatusUrl": "string",
+ "message": "string"
+ }
+
+# Case_3: Successful creation/updation of network
+response_3:
+ description: A dictionary or list with the response returned by the Cisco DNA Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "executionId": "string",
+ "executionStatusUrl": "string",
+ "message": "string"
+ }
+"""
+
+import copy
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts,
+ get_dict_result,
+ dnac_compare_equality,
+)
+
+
+class NetworkSettings(DnacBase):
+ """Class containing member attributes for network intent module"""
+
+    def __init__(self, module):
+        """
+        Initialize the NetworkSettings object.
+
+        Seeds self.result["response"] with one placeholder entry per managed
+        resource (global pool, reserve pool, network) and pre-computes the
+        comparison schemas consumed by requires_update().
+
+        Parameters:
+            module (AnsibleModule) - The Ansible module instance, forwarded
+                to the DnacBase constructor.
+        """
+        super().__init__(module)
+        # One response slot per resource type; filled in as operations run.
+        self.result["response"] = [
+            {"globalPool": {"response": {}, "msg": {}}},
+            {"reservePool": {"response": {}, "msg": {}}},
+            {"network": {"response": {}, "msg": {}}}
+        ]
+        # (dnac_param, ansible_param) comparison pairs per object type.
+        self.global_pool_obj_params = self.get_obj_params("GlobalPool")
+        self.reserve_pool_obj_params = self.get_obj_params("ReservePool")
+        self.network_obj_params = self.get_obj_params("Network")
+
+ def validate_input(self):
+ """
+ Checks if the configuration parameters provided in the playbook
+ meet the expected structure and data types,
+ as defined in the 'temp_spec' dictionary.
+
+ Parameters:
+ None
+
+ Returns:
+ self
+
+ """
+
+ if not self.config:
+ self.msg = "config not available in playbook for validation"
+ self.status = "success"
+ return self
+
+ # temp_spec is the specification for the expected structure of configuration parameters
+ temp_spec = {
+ "global_pool_details": {
+ "type": 'dict',
+ "settings": {
+ "type": 'dict',
+ "ip_pool": {
+ "type": 'list',
+ "ip_address_space": {"type": 'string'},
+ "dhcp_server_ips": {"type": 'list'},
+ "dns_server_ips": {"type": 'list'},
+ "gateway": {"type": 'string'},
+ "cidr": {"type": 'string'},
+ "name": {"type": 'string'},
+ "prevName": {"type": 'string'},
+ "pool_type": {
+ "type": 'string',
+ "choices": ["Generic", "LAN", "Management", "Service", "WAN"]
+ },
+ }
+ }
+ },
+ "reserve_pool_details": {
+ "type": 'dict',
+ "name": {"type": 'string'},
+ "prevName": {"type": 'string'},
+ "ipv6_address_space": {"type": 'bool'},
+ "ipv4_global_pool": {"type": 'string'},
+ "ipv4_prefix": {"type": 'bool'},
+ "ipv4_prefix_length": {"type": 'string'},
+ "ipv4_subnet": {"type": 'string'},
+ "ipv4GateWay": {"type": 'string'},
+ "ipv4DhcpServers": {"type": 'list'},
+ "ipv4_dns_servers": {"type": 'list'},
+ "ipv6_global_pool": {"type": 'string'},
+ "ipv6_prefix": {"type": 'bool'},
+ "ipv6_prefix_length": {"type": 'integer'},
+ "ipv6_subnet": {"type": 'string'},
+ "ipv6GateWay": {"type": 'string'},
+ "ipv6DhcpServers": {"type": 'list'},
+ "ipv6DnsServers": {"type": 'list'},
+ "ipv4TotalHost": {"type": 'integer'},
+ "ipv6TotalHost": {"type": 'integer'},
+ "slaac_support": {"type": 'bool'},
+ "site_name": {"type": 'string'},
+ "pool_type": {
+ "type": 'string',
+ "choices": ["Generic", "LAN", "Management", "Service", "WAN"]
+ },
+ },
+ "network_management_details": {
+ "type": 'dict',
+ "settings": {
+ "type": 'dict',
+ "dhcp_server": {"type": 'list'},
+ "dns_server": {
+ "type": 'dict',
+ "domain_name": {"type": 'string'},
+ "primary_ip_address": {"type": 'string'},
+ "secondary_ip_address": {"type": 'string'}
+ },
+ "syslog_server": {
+ "type": 'dict',
+ "ip_addresses": {"type": 'list'},
+ "configure_dnac_ip": {"type": 'bool'}
+ },
+ "snmp_server": {
+ "type": 'dict',
+ "ip_addresses": {"type": 'list'},
+ "configure_dnac_ip": {"type": 'bool'}
+ },
+ "netflow_collector": {
+ "type": 'dict',
+ "ip_address": {"type": 'string'},
+ "port": {"type": 'integer'},
+ },
+ "timezone": {"type": 'string'},
+ "ntp_server": {"type": 'list'},
+ "message_of_the_day": {
+ "type": 'dict',
+ "banner_message": {"type": 'string'},
+ "retain_existing_banner": {"type": 'bool'},
+ },
+ "network_aaa": {
+ "type": 'dict',
+ "servers": {"type": 'string', "choices": ["ISE", "AAA"]},
+ "ip_address": {"type": 'string'},
+ "network": {"type": 'string'},
+ "protocol": {"type": 'string', "choices": ["RADIUS", "TACACS"]},
+ "shared_secret": {"type": 'string'}
+
+ },
+ "client_and_endpoint_aaa": {
+ "type": 'dict',
+ "servers": {"type": 'string', "choices": ["ISE", "AAA"]},
+ "ip_address": {"type": 'string'},
+ "network": {"type": 'string'},
+ "protocol": {"type": 'string', "choices": ["RADIUS", "TACACS"]},
+ "shared_secret": {"type": 'string'}
+ }
+ },
+ "site_name": {"type": 'string'},
+ }
+ }
+
+ # Validate playbook params against the specification (temp_spec)
+ self.config = self.camel_to_snake_case(self.config)
+ valid_temp, invalid_params = validate_list_of_dicts(self.config, temp_spec)
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format("\n".join(invalid_params))
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_temp
+ self.log("Successfully validated playbook config params: {0}".format(valid_temp), "INFO")
+ self.msg = "Successfully validated input from the playbook"
+ self.status = "success"
+ return self
+
+ def requires_update(self, have, want, obj_params):
+ """
+ Check if the template config given requires update by comparing
+ current information wih the requested information.
+
+ This method compares the current global pool, reserve pool,
+ or network details from Cisco DNA Center with the user-provided details
+ from the playbook, using a specified schema for comparison.
+
+ Parameters:
+ have (dict) - Current information from the Cisco DNA Center
+ (global pool, reserve pool, network details)
+ want (dict) - Users provided information from the playbook
+ obj_params (list of tuples) - A list of parameter mappings specifying which
+ Cisco DNA Center parameters (dnac_param) correspond to
+ the user-provided parameters (ansible_param).
+
+ Returns:
+ bool - True if any parameter specified in obj_params differs between
+ current_obj and requested_obj, indicating that an update is required.
+ False if all specified parameters are equal.
+
+ """
+
+ current_obj = have
+ requested_obj = want
+ self.log("Current State (have): {0}".format(current_obj), "DEBUG")
+ self.log("Desired State (want): {0}".format(requested_obj), "DEBUG")
+
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def get_obj_params(self, get_object):
+ """
+ Get the required comparison obj_params value
+
+ Parameters:
+ get_object (str) - identifier for the required obj_params
+
+ Returns:
+ obj_params (list) - obj_params value for comparison.
+ """
+
+ try:
+ if get_object == "GlobalPool":
+ obj_params = [
+ ("settings", "settings"),
+ ]
+ elif get_object == "ReservePool":
+ obj_params = [
+ ("name", "name"),
+ ("type", "type"),
+ ("ipv6AddressSpace", "ipv6AddressSpace"),
+ ("ipv4GlobalPool", "ipv4GlobalPool"),
+ ("ipv4Prefix", "ipv4Prefix"),
+ ("ipv4PrefixLength", "ipv4PrefixLength"),
+ ("ipv4GateWay", "ipv4GateWay"),
+ ("ipv4DhcpServers", "ipv4DhcpServers"),
+ ("ipv4DnsServers", "ipv4DnsServers"),
+ ("ipv6GateWay", "ipv6GateWay"),
+ ("ipv6DhcpServers", "ipv6DhcpServers"),
+ ("ipv6DnsServers", "ipv6DnsServers"),
+ ("ipv4TotalHost", "ipv4TotalHost"),
+ ("slaacSupport", "slaacSupport")
+ ]
+ elif get_object == "Network":
+ obj_params = [
+ ("settings", "settings"),
+ ("site_name", "site_name")
+ ]
+ else:
+ raise ValueError("Received an unexpected value for 'get_object': {0}"
+ .format(get_object))
+ except Exception as msg:
+ self.log("Received exception: {0}".format(msg), "CRITICAL")
+
+ return obj_params
+
+    def get_site_id(self, site_name):
+        """
+        Get the site id from the site name.
+        Use check_return_status() to check for failure
+
+        Parameters:
+            site_name (str) - Site name
+
+        Returns:
+            str or None - The Site Id if found, or None if not found or error
+        """
+
+        try:
+            response = self.dnac._exec(
+                family="sites",
+                function='get_site',
+                params={"name": site_name},
+            )
+            self.log("Received API response from 'get_site': {0}".format(response), "DEBUG")
+            # A falsy response means the lookup returned nothing usable.
+            if not response:
+                self.log("Failed to retrieve the site ID for the site name: {0}"
+                         .format(site_name), "ERROR")
+                return None
+
+            # NOTE(review): assumes response["response"] is a non-empty list;
+            # an empty list raises IndexError, which is caught below and
+            # reported as "not found" (None) — confirm this is intended.
+            _id = response.get("response")[0].get("id")
+            self.log("Site ID for site name '{0}': {1}".format(site_name, _id), "DEBUG")
+        except Exception as msg:
+            # Any SDK or parsing failure is logged and mapped to None.
+            self.log("Exception occurred while retrieving site_id from the site_name: {0}"
+                     .format(msg), "CRITICAL")
+            return None
+
+        return _id
+
+ def get_global_pool_params(self, pool_info):
+ """
+ Process Global Pool params from playbook data for Global Pool config in Cisco DNA Center
+
+ Parameters:
+ pool_info (dict) - Playbook data containing information about the global pool
+
+ Returns:
+ dict or None - Processed Global Pool data in a format suitable
+ for Cisco DNA Center configuration, or None if pool_info is empty.
+ """
+
+ if not pool_info:
+ self.log("Global Pool is empty", "INFO")
+ return None
+
+ self.log("Global Pool Details: {0}".format(pool_info), "DEBUG")
+ global_pool = {
+ "settings": {
+ "ippool": [{
+ "dhcpServerIps": pool_info.get("dhcpServerIps"),
+ "dnsServerIps": pool_info.get("dnsServerIps"),
+ "ipPoolCidr": pool_info.get("ipPoolCidr"),
+ "ipPoolName": pool_info.get("ipPoolName"),
+ "type": pool_info.get("ipPoolType").capitalize()
+ }]
+ }
+ }
+ self.log("Formated global pool details: {0}".format(global_pool), "DEBUG")
+ global_ippool = global_pool.get("settings").get("ippool")[0]
+ if pool_info.get("ipv6") is False:
+ global_ippool.update({"IpAddressSpace": "IPv4"})
+ else:
+ global_ippool.update({"IpAddressSpace": "IPv6"})
+
+ self.log("ip_address_space: {0}".format(global_ippool.get("IpAddressSpace")), "DEBUG")
+ if not pool_info["gateways"]:
+ global_ippool.update({"gateway": ""})
+ else:
+ global_ippool.update({"gateway": pool_info.get("gateways")[0]})
+
+ return global_pool
+
+    def get_reserve_pool_params(self, pool_info):
+        """
+        Process Reserved Pool parameters from playbook data
+        for Reserved Pool configuration in Cisco DNA Center
+
+        Parameters:
+            pool_info (dict) - Playbook data containing information about the reserved pool
+
+        Returns:
+            reserve_pool (dict) - Processed Reserved pool data
+            in the format suitable for the Cisco DNA Center config
+        """
+
+        reserve_pool = {
+            "name": pool_info.get("groupName"),
+            "site_id": pool_info.get("siteId"),
+        }
+        # One ipPools entry: the pool is IPv4-only.
+        if len(pool_info.get("ipPools")) == 1:
+            reserve_pool.update({
+                "ipv4DhcpServers": pool_info.get("ipPools")[0].get("dhcpServerIps"),
+                "ipv4DnsServers": pool_info.get("ipPools")[0].get("dnsServerIps"),
+                "ipv6AddressSpace": "False"
+            })
+            if pool_info.get("ipPools")[0].get("gateways") != []:
+                reserve_pool.update({"ipv4GateWay": pool_info.get("ipPools")[0].get("gateways")[0]})
+            else:
+                reserve_pool.update({"ipv4GateWay": ""})
+            # NOTE(review): duplicate of the "ipv6AddressSpace": "False"
+            # assignment above — harmless but redundant.
+            reserve_pool.update({"ipv6AddressSpace": "False"})
+        # Two ipPools entries: one IPv4 and one IPv6, in either order.
+        elif len(pool_info.get("ipPools")) == 2:
+            # First entry is IPv4, second is IPv6.
+            if not pool_info.get("ipPools")[0].get("ipv6"):
+                reserve_pool.update({
+                    "ipv4DhcpServers": pool_info.get("ipPools")[0].get("dhcpServerIps"),
+                    "ipv4DnsServers": pool_info.get("ipPools")[0].get("dnsServerIps"),
+                    "ipv6AddressSpace": "True",
+                    "ipv6DhcpServers": pool_info.get("ipPools")[1].get("dhcpServerIps"),
+                    "ipv6DnsServers": pool_info.get("ipPools")[1].get("dnsServerIps"),
+
+                })
+
+                if pool_info.get("ipPools")[0].get("gateways") != []:
+                    reserve_pool.update({"ipv4GateWay":
+                                        pool_info.get("ipPools")[0].get("gateways")[0]})
+                else:
+                    reserve_pool.update({"ipv4GateWay": ""})
+
+                if pool_info.get("ipPools")[1].get("gateways") != []:
+                    reserve_pool.update({"ipv6GateWay":
+                                        pool_info.get("ipPools")[1].get("gateways")[0]})
+                else:
+                    reserve_pool.update({"ipv6GateWay": ""})
+
+            # First entry is IPv6, second is IPv4 (mirror of the branch above).
+            elif not pool_info.get("ipPools")[1].get("ipv6"):
+                reserve_pool.update({
+                    "ipv4DhcpServers": pool_info.get("ipPools")[1].get("dhcpServerIps"),
+                    "ipv4DnsServers": pool_info.get("ipPools")[1].get("dnsServerIps"),
+                    "ipv6AddressSpace": "True",
+                    "ipv6DnsServers": pool_info.get("ipPools")[0].get("dnsServerIps"),
+                    "ipv6DhcpServers": pool_info.get("ipPools")[0].get("dhcpServerIps")
+                })
+                if pool_info.get("ipPools")[1].get("gateways") != []:
+                    reserve_pool.update({"ipv4GateWay":
+                                        pool_info.get("ipPools")[1].get("gateways")[0]})
+                else:
+                    reserve_pool.update({"ipv4GateWay": ""})
+
+                if pool_info.get("ipPools")[0].get("gateways") != []:
+                    reserve_pool.update({"ipv6GateWay":
+                                        pool_info.get("ipPools")[0].get("gateways")[0]})
+                else:
+                    reserve_pool.update({"ipv6GateWay": ""})
+            # NOTE(review): slaacSupport is forced True only in the
+            # two-pool (dual-stack) branch — confirm this is intended.
+            reserve_pool.update({"slaacSupport": True})
+        self.log("Formatted reserve pool details: {0}".format(reserve_pool), "DEBUG")
+        return reserve_pool
+
+    def get_network_params(self, site_id):
+        """
+        Process the Network parameters from the playbook
+        for Network configuration in Cisco DNA Center
+
+        Parameters:
+            site_id (str) - The Site ID for which network parameters are requested
+
+        Returns:
+            dict or None: Processed Network data in a format
+            suitable for Cisco DNA Center configuration, or None
+            if the response is not a dictionary or there was an error.
+        """
+
+        response = self.dnac._exec(
+            family="network_settings",
+            function='get_network_v2',
+            params={"site_id": site_id}
+        )
+        self.log("Received API response from 'get_network_v2': {0}".format(response), "DEBUG")
+        if not isinstance(response, dict):
+            self.log("Failed to retrieve the network details - "
+                     "Response is not a dictionary", "ERROR")
+            return None
+
+        # Extract various network-related details from the response.
+        # Each settings category is one list entry selected by its "key".
+        all_network_details = response.get("response")
+        dhcp_details = get_dict_result(all_network_details, "key", "dhcp.server")
+        dns_details = get_dict_result(all_network_details, "key", "dns.server")
+        snmp_details = get_dict_result(all_network_details, "key", "snmp.trap.receiver")
+        syslog_details = get_dict_result(all_network_details, "key", "syslog.server")
+        netflow_details = get_dict_result(all_network_details, "key", "netflow.collector")
+        ntpserver_details = get_dict_result(all_network_details, "key", "ntp.server")
+        timezone_details = get_dict_result(all_network_details, "key", "timezone.site")
+        messageoftheday_details = get_dict_result(all_network_details, "key", "device.banner")
+        network_aaa = get_dict_result(all_network_details, "key", "aaa.network.server.1")
+        network_aaa2 = get_dict_result(all_network_details, "key", "aaa.network.server.2")
+        network_aaa_pan = get_dict_result(all_network_details, "key", "aaa.server.pan.network")
+        clientAndEndpoint_aaa = get_dict_result(all_network_details, "key", "aaa.endpoint.server.1")
+        clientAndEndpoint_aaa2 = get_dict_result(all_network_details,
+                                                 "key",
+                                                 "aaa.endpoint.server.2")
+        clientAndEndpoint_aaa_pan = \
+            get_dict_result(all_network_details, "key", "aaa.server.pan.endpoint")
+
+        # Prepare the network details for Cisco DNA Center configuration.
+        # NOTE(review): assumes snmp/syslog/netflow/timezone entries are
+        # always present; a missing entry would raise AttributeError here —
+        # confirm against the API contract.
+        network_details = {
+            "settings": {
+                "snmpServer": {
+                    "configureDnacIP": snmp_details.get("value")[0].get("configureDnacIP"),
+                    "ipAddresses": snmp_details.get("value")[0].get("ipAddresses"),
+                },
+                "syslogServer": {
+                    "configureDnacIP": syslog_details.get("value")[0].get("configureDnacIP"),
+                    "ipAddresses": syslog_details.get("value")[0].get("ipAddresses"),
+                },
+                "netflowcollector": {
+                    "ipAddress": netflow_details.get("value")[0].get("ipAddress"),
+                    "port": netflow_details.get("value")[0].get("port")
+                },
+                "timezone": timezone_details.get("value")[0],
+            }
+        }
+        network_settings = network_details.get("settings")
+        # DHCP/NTP: fall back to a single empty string when unset, matching
+        # the payload shape expected by the update API.
+        if dhcp_details and dhcp_details.get("value") != []:
+            network_settings.update({"dhcpServer": dhcp_details.get("value")})
+        else:
+            network_settings.update({"dhcpServer": [""]})
+
+        if dns_details is not None:
+            network_settings.update({
+                "dnsServer": {
+                    "domainName": dns_details.get("value")[0].get("domainName"),
+                    "primaryIpAddress": dns_details.get("value")[0].get("primaryIpAddress"),
+                    "secondaryIpAddress": dns_details.get("value")[0].get("secondaryIpAddress")
+                }
+            })
+
+        if ntpserver_details and ntpserver_details.get("value") != []:
+            network_settings.update({"ntpServer": ntpserver_details.get("value")})
+        else:
+            network_settings.update({"ntpServer": [""]})
+
+        if messageoftheday_details is not None:
+            network_settings.update({
+                "messageOfTheday": {
+                    "bannerMessage": messageoftheday_details.get("value")[0].get("bannerMessage"),
+                }
+            })
+            # The API stores a boolean; the payload wants the strings
+            # "true"/"false".
+            retain_existing_banner = messageoftheday_details.get("value")[0] \
+                .get("retainExistingBanner")
+            if retain_existing_banner is True:
+                network_settings.get("messageOfTheday").update({
+                    "retainExistingBanner": "true"
+                })
+            else:
+                network_settings.get("messageOfTheday").update({
+                    "retainExistingBanner": "false"
+                })
+
+        # A pan value of "None" means plain AAA servers; otherwise ISE.
+        if network_aaa and network_aaa_pan:
+            aaa_pan_value = network_aaa_pan.get("value")[0]
+            aaa_value = network_aaa.get("value")[0]
+            if aaa_pan_value == "None":
+                network_settings.update({
+                    "network_aaa": {
+                        "network": aaa_value.get("ipAddress"),
+                        "protocol": aaa_value.get("protocol"),
+                        # NOTE(review): network_aaa2 may be None when no
+                        # secondary server exists — this would raise
+                        # AttributeError; confirm.
+                        "ipAddress": network_aaa2.get("value")[0].get("ipAddress"),
+                        "servers": "AAA"
+                    }
+                })
+            else:
+                network_settings.update({
+                    "network_aaa": {
+                        "network": aaa_value.get("ipAddress"),
+                        "protocol": aaa_value.get("protocol"),
+                        "ipAddress": aaa_pan_value,
+                        "servers": "ISE"
+                    }
+                })
+
+        if clientAndEndpoint_aaa and clientAndEndpoint_aaa_pan:
+            aaa_pan_value = clientAndEndpoint_aaa_pan.get("value")[0]
+            aaa_value = clientAndEndpoint_aaa.get("value")[0]
+            if aaa_pan_value == "None":
+                network_settings.update({
+                    "clientAndEndpoint_aaa": {
+                        "network": aaa_value.get("ipAddress"),
+                        "protocol": aaa_value.get("protocol"),
+                        # NOTE(review): same possible None deref as
+                        # network_aaa2 above — confirm.
+                        "ipAddress": clientAndEndpoint_aaa2.get("value")[0].get("ipAddress"),
+                        "servers": "AAA"
+                    }
+                })
+            else:
+                network_settings.update({
+                    "clientAndEndpoint_aaa": {
+                        "network": aaa_value.get("ipAddress"),
+                        "protocol": aaa_value.get("protocol"),
+                        "ipAddress": aaa_pan_value,
+                        "servers": "ISE"
+                    }
+                })
+
+        self.log("Formatted playbook network details: {0}".format(network_details), "DEBUG")
+        return network_details
+
+ def global_pool_exists(self, name):
+ """
+ Check if the Global Pool with the given name exists
+
+ Parameters:
+ name (str) - The name of the Global Pool to check for existence
+
+ Returns:
+ dict - A dictionary containing information about the Global Pool's existence:
+ - 'exists' (bool): True if the Global Pool exists, False otherwise.
+ - 'id' (str or None): The ID of the Global Pool if it exists, or None if it doesn't.
+ - 'details' (dict or None): Details of the Global Pool if it exists, else None.
+ """
+
+ global_pool = {
+ "exists": False,
+ "details": None,
+ "id": None
+ }
+ response = self.dnac._exec(
+ family="network_settings",
+ function="get_global_pool",
+ )
+ if not isinstance(response, dict):
+ self.log("Failed to retrieve the global pool details - "
+ "Response is not a dictionary", "CRITICAL")
+ return global_pool
+
+ all_global_pool_details = response.get("response")
+ global_pool_details = get_dict_result(all_global_pool_details, "ipPoolName", name)
+ self.log("Global ip pool name: {0}".format(name), "DEBUG")
+ self.log("Global pool details: {0}".format(global_pool_details), "DEBUG")
+ if not global_pool_details:
+ self.log("Global pool {0} does not exist".format(name), "INFO")
+ return global_pool
+ global_pool.update({"exists": True})
+ global_pool.update({"id": global_pool_details.get("id")})
+ global_pool["details"] = self.get_global_pool_params(global_pool_details)
+
+ self.log("Formatted global pool details: {0}".format(global_pool), "DEBUG")
+ return global_pool
+
+ def reserve_pool_exists(self, name, site_name):
+ """
+ Check if the Reserved pool with the given name exists in a specific site
+ Use check_return_status() to check for failure
+
+ Parameters:
+ name (str) - The name of the Reserved pool to check for existence.
+ site_name (str) - The name of the site where the Reserved pool is located.
+
+ Returns:
+ dict - A dictionary containing information about the Reserved pool's existence:
+ - 'exists' (bool): True if the Reserved pool exists in the specified site, else False.
+ - 'id' (str or None): The ID of the Reserved pool if it exists, or None if it doesn't.
+ - 'details' (dict or None): Details of the Reserved pool if it exists, or else None.
+ """
+
+ reserve_pool = {
+ "exists": False,
+ "details": None,
+ "id": None,
+ "success": True
+ }
+ site_id = self.get_site_id(site_name)
+ self.log("Site ID for the site name {0}: {1}".format(site_name, site_id), "DEBUG")
+ if not site_id:
+ reserve_pool.update({"success": False})
+ self.msg = "Failed to get the site id from the site name {0}".format(site_name)
+ self.status = "failed"
+ return reserve_pool
+
+ response = self.dnac._exec(
+ family="network_settings",
+ function="get_reserve_ip_subpool",
+ params={"siteId": site_id}
+ )
+ if not isinstance(response, dict):
+ reserve_pool.update({"success": False})
+ self.msg = "Error in getting reserve pool - Response is not a dictionary"
+ self.status = "exited"
+ return reserve_pool
+
+ all_reserve_pool_details = response.get("response")
+ reserve_pool_details = get_dict_result(all_reserve_pool_details, "groupName", name)
+ if not reserve_pool_details:
+ self.log("Reserved pool {0} does not exist in the site {1}"
+ .format(name, site_name), "DEBUG")
+ return reserve_pool
+
+ reserve_pool.update({"exists": True})
+ reserve_pool.update({"id": reserve_pool_details.get("id")})
+ reserve_pool.update({"details": self.get_reserve_pool_params(reserve_pool_details)})
+
+ self.log("Reserved pool details: {0}".format(reserve_pool.get("details")), "DEBUG")
+ self.log("Reserved pool id: {0}".format(reserve_pool.get("id")), "DEBUG")
+ return reserve_pool
+
+ def get_have_global_pool(self, config):
+ """
+ Get the current Global Pool information from
+ Cisco DNA Center based on the provided playbook details.
+ check this API using check_return_status.
+
+ Parameters:
+ config (dict) - Playbook details containing Global Pool configuration.
+
+ Returns:
+ self - The current object with updated information.
+ """
+
+ global_pool = {
+ "exists": False,
+ "details": None,
+ "id": None
+ }
+ global_pool_settings = config.get("global_pool_details").get("settings")
+ if global_pool_settings is None:
+ self.msg = "settings in global_pool_details is missing in the playbook"
+ self.status = "failed"
+ return self
+
+ global_pool_ippool = global_pool_settings.get("ip_pool")
+ if global_pool_ippool is None:
+ self.msg = "ip_pool in global_pool_details is missing in the playbook"
+ self.status = "failed"
+ return self
+
+ name = global_pool_ippool[0].get("name")
+ if name is None:
+ self.msg = "Mandatory Parameter name required"
+ self.status = "failed"
+ return self
+
+ # If the Global Pool doesn't exist and a previous name is provided
+ # Else try using the previous name
+ global_pool = self.global_pool_exists(name)
+ self.log("Global pool details: {0}".format(global_pool), "DEBUG")
+ prev_name = global_pool_ippool[0].get("prev_name")
+ if global_pool.get("exists") is False and \
+ prev_name is not None:
+ global_pool = self.global_pool_exists(prev_name)
+ if global_pool.get("exists") is False:
+ self.msg = "Prev name {0} doesn't exist in global_pool_details".format(prev_name)
+ self.status = "failed"
+ return self
+
+ self.log("Global pool exists: {0}".format(global_pool.get("exists")), "DEBUG")
+ self.log("Current Site: {0}".format(global_pool.get("details")), "DEBUG")
+ self.have.update({"globalPool": global_pool})
+ self.msg = "Collecting the global pool details from the Cisco DNA Center"
+ self.status = "success"
+ return self
+
+ def get_have_reserve_pool(self, config):
+ """
+ Get the current Reserved Pool information from Cisco DNA Center
+ based on the provided playbook details.
+ Check this API using check_return_status
+
+ Parameters:
+ config (list of dict) - Playbook details containing Reserved Pool configuration.
+
+ Returns:
+ self - The current object with updated information.
+ """
+
+ reserve_pool = {
+ "exists": False,
+ "details": None,
+ "id": None
+ }
+ reserve_pool_details = config.get("reserve_pool_details")
+ name = reserve_pool_details.get("name")
+ if name is None:
+ self.msg = "Mandatory Parameter name required in reserve_pool_details\n"
+ self.status = "failed"
+ return self
+
+ site_name = reserve_pool_details.get("site_name")
+ self.log("Site Name: {0}".format(site_name), "DEBUG")
+ if site_name is None:
+ self.msg = "Missing parameter 'site_name' in reserve_pool_details"
+ self.status = "failed"
+ return self
+
+ # Check if the Reserved Pool exists in Cisco DNA Center
+ # based on the provided name and site name
+ reserve_pool = self.reserve_pool_exists(name, site_name)
+ if not reserve_pool.get("success"):
+ return self.check_return_status()
+ self.log("Reserved pool details: {0}".format(reserve_pool), "DEBUG")
+
+ # If the Reserved Pool doesn't exist and a previous name is provided
+ # Else try using the previous name
+ prev_name = reserve_pool_details.get("prev_name")
+ if reserve_pool.get("exists") is False and \
+ prev_name is not None:
+ reserve_pool = self.reserve_pool_exists(prev_name, site_name)
+ if not reserve_pool.get("success"):
+ return self.check_return_status()
+
+ # If the previous name doesn't exist in Cisco DNA Center, return with error
+ if reserve_pool.get("exists") is False:
+ self.msg = "Prev name {0} doesn't exist in reserve_pool_details".format(prev_name)
+ self.status = "failed"
+ return self
+
+ self.log("Reserved pool exists: {0}".format(reserve_pool.get("exists")), "DEBUG")
+ self.log("Reserved pool: {0}".format(reserve_pool.get("details")), "DEBUG")
+
+ # If reserve pool exist, convert ipv6AddressSpace to the required format (boolean)
+ if reserve_pool.get("exists"):
+ reserve_pool_details = reserve_pool.get("details")
+ if reserve_pool_details.get("ipv6AddressSpace") == "False":
+ reserve_pool_details.update({"ipv6AddressSpace": False})
+ else:
+ reserve_pool_details.update({"ipv6AddressSpace": True})
+
+ self.log("Reserved pool details: {0}".format(reserve_pool), "DEBUG")
+ self.have.update({"reservePool": reserve_pool})
+ self.msg = "Collecting the reserve pool details from the Cisco DNA Center"
+ self.status = "success"
+ return self
+
+ def get_have_network(self, config):
+ """
+ Get the current Network details from Cisco DNA
+ Center based on the provided playbook details.
+
+ Parameters:
+ config (dict) - Playbook details containing Network Management configuration.
+
+ Returns:
+ self - The current object with updated Network information.
+ """
+ network = {}
+ site_name = config.get("network_management_details").get("site_name")
+ if site_name is None:
+ self.msg = "Mandatory Parameter 'site_name' missing"
+ self.status = "failed"
+ return self
+
+ site_id = self.get_site_id(site_name)
+ if site_id is None:
+ self.msg = "Failed to get site id from {0}".format(site_name)
+ self.status = "failed"
+ return self
+
+ network["site_id"] = site_id
+ network["net_details"] = self.get_network_params(site_id)
+ self.log("Network details from the Catalyst Center: {0}".format(network), "DEBUG")
+ self.have.update({"network": network})
+ self.msg = "Collecting the network details from the Cisco DNA Center"
+ self.status = "success"
+ return self
+
+ def get_have(self, config):
+ """
+ Get the current Global Pool Reserved Pool and Network details from Cisco DNA Center
+
+ Parameters:
+ config (dict) - Playbook details containing Global Pool,
+ Reserved Pool, and Network Management configuration.
+
+ Returns:
+ self - The current object with updated Global Pool,
+ Reserved Pool, and Network information.
+ """
+
+ if config.get("global_pool_details") is not None:
+ self.get_have_global_pool(config).check_return_status()
+
+ if config.get("reserve_pool_details") is not None:
+ self.get_have_reserve_pool(config).check_return_status()
+
+ if config.get("network_management_details") is not None:
+ self.get_have_network(config).check_return_status()
+
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.msg = "Successfully retrieved the details from the Cisco DNA Center"
+ self.status = "success"
+ return self
+
+ def get_want_global_pool(self, global_ippool):
+ """
+ Get all the Global Pool information from playbook
+ Set the status and the msg before returning from the API
+ Check the return value of the API with check_return_status()
+
+ Parameters:
+ global_ippool (dict) - Playbook global pool details containing IpAddressSpace,
+ DHCP server IPs, DNS server IPs, IP pool name, IP pool CIDR, gateway, and type.
+
+ Returns:
+ self - The current object with updated desired Global Pool information.
+ """
+
+ # Initialize the desired Global Pool configuration
+ want_global = {
+ "settings": {
+ "ippool": [{
+ "IpAddressSpace": global_ippool.get("ip_address_space"),
+ "dhcpServerIps": global_ippool.get("dhcp_server_ips"),
+ "dnsServerIps": global_ippool.get("dns_server_ips"),
+ "ipPoolName": global_ippool.get("name"),
+ "ipPoolCidr": global_ippool.get("cidr"),
+ "gateway": global_ippool.get("gateway"),
+ "type": global_ippool.get("pool_type"),
+ }]
+ }
+ }
+ want_ippool = want_global.get("settings").get("ippool")[0]
+
+ # Converting to the required format based on the existing Global Pool
+ if not self.have.get("globalPool").get("exists"):
+ if want_ippool.get("dhcpServerIps") is None:
+ want_ippool.update({"dhcpServerIps": []})
+ if want_ippool.get("dnsServerIps") is None:
+ want_ippool.update({"dnsServerIps": []})
+ if want_ippool.get("IpAddressSpace") is None:
+ want_ippool.update({"IpAddressSpace": ""})
+ if want_ippool.get("gateway") is None:
+ want_ippool.update({"gateway": ""})
+ if want_ippool.get("type") is None:
+ global_ippool_type = global_ippool.get("type")
+ if not global_ippool_type:
+ want_ippool.update({"type": "Generic"})
+ else:
+ want_ippool.update({"type": global_ippool_type})
+ self.log("'type' is deprecated and use 'pool_type'", "WARNING")
+
+ else:
+ have_ippool = self.have.get("globalPool").get("details") \
+ .get("settings").get("ippool")[0]
+
+ # Copy existing Global Pool information if the desired configuration is not provided
+ want_ippool.update({
+ "IpAddressSpace": have_ippool.get("IpAddressSpace"),
+ "type": have_ippool.get("type"),
+ "ipPoolCidr": have_ippool.get("ipPoolCidr")
+ })
+ want_ippool.update({})
+ want_ippool.update({})
+
+ for key in ["dhcpServerIps", "dnsServerIps", "gateway"]:
+ if want_ippool.get(key) is None and have_ippool.get(key) is not None:
+ want_ippool[key] = have_ippool[key]
+
+ self.log("Global pool playbook details: {0}".format(want_global), "DEBUG")
+ self.want.update({"wantGlobal": want_global})
+ self.msg = "Collecting the global pool details from the playbook"
+ self.status = "success"
+ return self
+
+ def get_want_reserve_pool(self, reserve_pool):
+ """
+ Get all the Reserved Pool information from playbook
+ Set the status and the msg before returning from the API
+ Check the return value of the API with check_return_status()
+
+ Parameters:
+ reserve_pool (dict) - Playbook reserved pool
+ details containing various properties.
+
+ Returns:
+ self - The current object with updated desired Reserved Pool information.
+ """
+
+ want_reserve = {
+ "name": reserve_pool.get("name"),
+ "type": reserve_pool.get("pool_type"),
+ "ipv6AddressSpace": reserve_pool.get("ipv6_address_space"),
+ "ipv4GlobalPool": reserve_pool.get("ipv4_global_pool"),
+ "ipv4Prefix": reserve_pool.get("ipv4_prefix"),
+ "ipv4PrefixLength": reserve_pool.get("ipv4_prefix_length"),
+ "ipv4GateWay": reserve_pool.get("ipv4_gateway"),
+ "ipv4DhcpServers": reserve_pool.get("ipv4_dhcp_servers"),
+ "ipv4DnsServers": reserve_pool.get("ipv4_dns_servers"),
+ "ipv4Subnet": reserve_pool.get("ipv4_subnet"),
+ "ipv6GlobalPool": reserve_pool.get("ipv6_global_pool"),
+ "ipv6Prefix": reserve_pool.get("ipv6_prefix"),
+ "ipv6PrefixLength": reserve_pool.get("ipv6_prefix_length"),
+ "ipv6GateWay": reserve_pool.get("ipv6_gateway"),
+ "ipv6DhcpServers": reserve_pool.get("ipv6_dhcp_servers"),
+ "ipv6Subnet": reserve_pool.get("ipv6_subnet"),
+ "ipv6DnsServers": reserve_pool.get("ipv6_dns_servers"),
+ "ipv4TotalHost": reserve_pool.get("ipv4_total_host"),
+ "ipv6TotalHost": reserve_pool.get("ipv6_total_host")
+ }
+
+ # Check for missing mandatory parameters in the playbook
+ if not want_reserve.get("name"):
+ self.msg = "Missing mandatory parameter 'name' in reserve_pool_details"
+ self.status = "failed"
+ return self
+
+ if want_reserve.get("ipv4Prefix") is True:
+ if want_reserve.get("ipv4Subnet") is None and \
+ want_reserve.get("ipv4TotalHost") is None:
+ self.msg = "missing parameter 'ipv4_subnet' or 'ipv4TotalHost' \
+ while adding the ipv4 in reserve_pool_details"
+ self.status = "failed"
+ return self
+
+ if want_reserve.get("ipv6Prefix") is True:
+ if want_reserve.get("ipv6Subnet") is None and \
+ want_reserve.get("ipv6TotalHost") is None:
+ self.msg = "missing parameter 'ipv6_subnet' or 'ipv6TotalHost' \
+ while adding the ipv6 in reserve_pool_details"
+ self.status = "failed"
+ return self
+
+ self.log("Reserved IP pool playbook details: {0}".format(want_reserve), "DEBUG")
+
+ # If there are no existing Reserved Pool details, validate and set defaults
+ if not self.have.get("reservePool").get("details"):
+ if not want_reserve.get("ipv4GlobalPool"):
+ self.msg = "missing parameter 'ipv4GlobalPool' in reserve_pool_details"
+ self.status = "failed"
+ return self
+
+ if not want_reserve.get("ipv4PrefixLength"):
+ self.msg = "missing parameter 'ipv4_prefix_length' in reserve_pool_details"
+ self.status = "failed"
+ return self
+
+ if want_reserve.get("type") is None:
+ reserve_pool_type = reserve_pool.get("type")
+ if not reserve_pool_type:
+ want_reserve.update({"type": "Generic"})
+ else:
+ want_reserve.update({"type": reserve_pool_type})
+ self.log("'type' is deprecated and use 'pool_type'", "WARNING")
+ if want_reserve.get("ipv4GateWay") is None:
+ want_reserve.update({"ipv4GateWay": ""})
+ if want_reserve.get("ipv4DhcpServers") is None:
+ want_reserve.update({"ipv4DhcpServers": []})
+ if want_reserve.get("ipv4DnsServers") is None:
+ want_reserve.update({"ipv4DnsServers": []})
+ if want_reserve.get("ipv6AddressSpace") is None:
+ want_reserve.update({"ipv6AddressSpace": False})
+ if want_reserve.get("slaacSupport") is None:
+ want_reserve.update({"slaacSupport": True})
+ if want_reserve.get("ipv4TotalHost") is None:
+ del want_reserve['ipv4TotalHost']
+ if want_reserve.get("ipv6AddressSpace") is True:
+ want_reserve.update({"ipv6Prefix": True})
+ else:
+ del want_reserve['ipv6Prefix']
+
+ if not want_reserve.get("ipv6AddressSpace"):
+ keys_to_check = ['ipv6GlobalPool', 'ipv6PrefixLength',
+ 'ipv6GateWay', 'ipv6DhcpServers',
+ 'ipv6DnsServers', 'ipv6TotalHost']
+ for key in keys_to_check:
+ if want_reserve.get(key) is None:
+ del want_reserve[key]
+ else:
+ keys_to_delete = ['type', 'ipv4GlobalPool',
+ 'ipv4Prefix', 'ipv4PrefixLength',
+ 'ipv4TotalHost', 'ipv4Subnet']
+ for key in keys_to_delete:
+ if key in want_reserve:
+ del want_reserve[key]
+
+ self.want.update({"wantReserve": want_reserve})
+ self.log("Desired State (want): {0}".format(self.want), "INFO")
+ self.msg = "Collecting the reserve pool details from the playbook"
+ self.status = "success"
+ return self
+
+ def get_want_network(self, network_management_details):
+ """
+ Get all the Network related information from playbook
+ Set the status and the msg before returning from the API
+ Check the return value of the API with check_return_status()
+
+ Parameters:
+ network_management_details (dict) - Playbook network
+ details containing various network settings.
+
+ Returns:
+ self - The current object with updated desired Network-related information.
+ """
+
+ want_network = {
+ "settings": {
+ "dhcpServer": {},
+ "dnsServer": {},
+ "snmpServer": {},
+ "syslogServer": {},
+ "netflowcollector": {},
+ "ntpServer": {},
+ "timezone": "",
+ "messageOfTheday": {},
+ "network_aaa": {},
+ "clientAndEndpoint_aaa": {}
+ }
+ }
+ want_network_settings = want_network.get("settings")
+ self.log("Current state (have): {0}".format(self.have), "DEBUG")
+ if network_management_details.get("dhcp_server") is not None:
+ want_network_settings.update({
+ "dhcpServer": network_management_details.get("dhcp_server")
+ })
+ else:
+ del want_network_settings["dhcpServer"]
+
+ if network_management_details.get("ntp_server") is not None:
+ want_network_settings.update({
+ "ntpServer": network_management_details.get("ntp_server")
+ })
+ else:
+ del want_network_settings["ntpServer"]
+
+ if network_management_details.get("timezone") is not None:
+ want_network_settings["timezone"] = \
+ network_management_details.get("timezone")
+ else:
+ self.msg = "missing parameter timezone in network"
+ self.status = "failed"
+ return self
+
+ dnsServer = network_management_details.get("dns_server")
+ if dnsServer is not None:
+ if dnsServer.get("domain_name") is not None:
+ want_network_settings.get("dnsServer").update({
+ "domainName":
+ dnsServer.get("domain_name")
+ })
+
+ if dnsServer.get("primary_ip_address") is not None:
+ want_network_settings.get("dnsServer").update({
+ "primaryIpAddress":
+ dnsServer.get("primary_ip_address")
+ })
+
+ if dnsServer.get("secondary_ip_address") is not None:
+ want_network_settings.get("dnsServer").update({
+ "secondaryIpAddress":
+ dnsServer.get("secondary_ip_address")
+ })
+ else:
+ del want_network_settings["dnsServer"]
+
+ snmpServer = network_management_details.get("snmp_server")
+ if snmpServer is not None:
+ if snmpServer.get("configure_dnac_ip") is not None:
+ want_network_settings.get("snmpServer").update({
+ "configureDnacIP": snmpServer.get("configure_dnac_ip")
+ })
+ if snmpServer.get("ip_addresses") is not None:
+ want_network_settings.get("snmpServer").update({
+ "ipAddresses": snmpServer.get("ip_addresses")
+ })
+ else:
+ del want_network_settings["snmpServer"]
+
+ syslogServer = network_management_details.get("syslog_server")
+ if syslogServer is not None:
+ if syslogServer.get("configure_dnac_ip") is not None:
+ want_network_settings.get("syslogServer").update({
+ "configureDnacIP": syslogServer.get("configure_dnac_ip")
+ })
+ if syslogServer.get("ip_addresses") is not None:
+ want_network_settings.get("syslogServer").update({
+ "ipAddresses": syslogServer.get("ip_addresses")
+ })
+ else:
+ del want_network_settings["syslogServer"]
+
+ netflowcollector = network_management_details.get("netflow_collector")
+ if netflowcollector is not None:
+ if netflowcollector.get("ip_address") is not None:
+ want_network_settings.get("netflowcollector").update({
+ "ipAddress":
+ netflowcollector.get("ip_address")
+ })
+ if netflowcollector.get("port") is not None:
+ want_network_settings.get("netflowcollector").update({
+ "port":
+ netflowcollector.get("port")
+ })
+ else:
+ del want_network_settings["netflowcollector"]
+
+ messageOfTheday = network_management_details.get("message_of_the_day")
+ if messageOfTheday is not None:
+ if messageOfTheday.get("banner_message") is not None:
+ want_network_settings.get("messageOfTheday").update({
+ "bannerMessage":
+ messageOfTheday.get("banner_message")
+ })
+ if messageOfTheday.get("retain_existing_banner") is not None:
+ want_network_settings.get("messageOfTheday").update({
+ "retainExistingBanner":
+ messageOfTheday.get("retain_existing_banner")
+ })
+ else:
+ del want_network_settings["messageOfTheday"]
+
+ network_aaa = network_management_details.get("network_aaa")
+ if network_aaa:
+ if network_aaa.get("ip_address"):
+ want_network_settings.get("network_aaa").update({
+ "ipAddress":
+ network_aaa.get("ip_address")
+ })
+ else:
+ if network_aaa.get("servers") == "ISE":
+ self.msg = "missing parameter ip_address in network_aaa, server ISE is set"
+ self.status = "failed"
+ return self
+
+ if network_aaa.get("network"):
+ want_network_settings.get("network_aaa").update({
+ "network": network_aaa.get("network")
+ })
+ else:
+ self.msg = "missing parameter network in network_aaa"
+ self.status = "failed"
+ return self
+
+ if network_aaa.get("protocol"):
+ want_network_settings.get("network_aaa").update({
+ "protocol":
+ network_aaa.get("protocol")
+ })
+ else:
+ self.msg = "missing parameter protocol in network_aaa"
+ self.status = "failed"
+ return self
+
+ if network_aaa.get("servers"):
+ want_network_settings.get("network_aaa").update({
+ "servers":
+ network_aaa.get("servers")
+ })
+ else:
+ self.msg = "missing parameter servers in network_aaa"
+ self.status = "failed"
+ return self
+
+ if network_aaa.get("shared_secret"):
+ want_network_settings.get("network_aaa").update({
+ "sharedSecret":
+ network_aaa.get("shared_secret")
+ })
+ else:
+ del want_network_settings["network_aaa"]
+
+ clientAndEndpoint_aaa = network_management_details.get("client_and_endpoint_aaa")
+ if clientAndEndpoint_aaa:
+ if clientAndEndpoint_aaa.get("ip_address"):
+ want_network_settings.get("clientAndEndpoint_aaa").update({
+ "ipAddress":
+ clientAndEndpoint_aaa.get("ip_address")
+ })
+ else:
+ if clientAndEndpoint_aaa.get("servers") == "ISE":
+ self.msg = "missing parameter ip_address in clientAndEndpoint_aaa, \
+ server ISE is set"
+ self.status = "failed"
+ return self
+
+ if clientAndEndpoint_aaa.get("network"):
+ want_network_settings.get("clientAndEndpoint_aaa").update({
+ "network":
+ clientAndEndpoint_aaa.get("network")
+ })
+ else:
+ self.msg = "missing parameter network in clientAndEndpoint_aaa"
+ self.status = "failed"
+ return self
+
+ if clientAndEndpoint_aaa.get("protocol"):
+ want_network_settings.get("clientAndEndpoint_aaa").update({
+ "protocol":
+ clientAndEndpoint_aaa.get("protocol")
+ })
+ else:
+ self.msg = "missing parameter protocol in clientAndEndpoint_aaa"
+ self.status = "failed"
+ return self
+
+ if clientAndEndpoint_aaa.get("servers"):
+ want_network_settings.get("clientAndEndpoint_aaa").update({
+ "servers":
+ clientAndEndpoint_aaa.get("servers")
+ })
+ else:
+ self.msg = "missing parameter servers in clientAndEndpoint_aaa"
+ self.status = "failed"
+ return self
+
+ if clientAndEndpoint_aaa.get("shared_secret"):
+ want_network_settings.get("clientAndEndpoint_aaa").update({
+ "sharedSecret":
+ clientAndEndpoint_aaa.get("shared_secret")
+ })
+ else:
+ del want_network_settings["clientAndEndpoint_aaa"]
+
+ self.log("Network playbook details: {0}".format(want_network), "DEBUG")
+ self.want.update({"wantNetwork": want_network})
+ self.msg = "Collecting the network details from the playbook"
+ self.status = "success"
+ return self
+
    def get_want(self, config):
        """
        Get all the Global Pool Reserved Pool and Network related information from playbook

        Parameters:
            config (list of dict) - Playbook details

        Returns:
            self - The current object with the desired state stored in self.want.
        """

        if config.get("global_pool_details"):
            # Only the first entry of the ip_pool list is processed here
            global_ippool = config.get("global_pool_details").get("settings").get("ip_pool")[0]
            self.get_want_global_pool(global_ippool).check_return_status()

        if config.get("reserve_pool_details"):
            reserve_pool = config.get("reserve_pool_details")
            self.get_want_reserve_pool(reserve_pool).check_return_status()

        if config.get("network_management_details"):
            network_management_details = config.get("network_management_details") \
                .get("settings")
            self.get_want_network(network_management_details).check_return_status()

        self.log("Desired State (want): {0}".format(self.want), "INFO")
        self.msg = "Successfully retrieved details from the playbook"
        self.status = "success"
        return self
+
+ def update_global_pool(self, config):
+ """
+ Update/Create Global Pool in Cisco DNA Center with fields provided in playbook
+
+ Parameters:
+ config (list of dict) - Playbook details
+
+ Returns:
+ None
+ """
+
+ name = config.get("global_pool_details") \
+ .get("settings").get("ip_pool")[0].get("name")
+ result_global_pool = self.result.get("response")[0].get("globalPool")
+ result_global_pool.get("response").update({name: {}})
+
+ # Check pool exist, if not create and return
+ if not self.have.get("globalPool").get("exists"):
+ pool_params = self.want.get("wantGlobal")
+ self.log("Desired State for global pool (want): {0}".format(pool_params), "DEBUG")
+ response = self.dnac._exec(
+ family="network_settings",
+ function="create_global_pool",
+ params=pool_params,
+ )
+ self.check_execution_response_status(response).check_return_status()
+ self.log("Successfully created global pool '{0}'.".format(name), "INFO")
+ result_global_pool.get("response").get(name) \
+ .update({"globalPool Details": self.want.get("wantGlobal")})
+ result_global_pool.get("msg").update({name: "Global Pool Created Successfully"})
+ return
+
+ # Pool exists, check update is required
+ if not self.requires_update(self.have.get("globalPool").get("details"),
+ self.want.get("wantGlobal"), self.global_pool_obj_params):
+ self.log("Global pool '{0}' doesn't require an update".format(name), "INFO")
+ result_global_pool.get("response").get(name).update({
+ "Cisco DNA Center params":
+ self.have.get("globalPool").get("details").get("settings").get("ippool")[0]
+ })
+ result_global_pool.get("response").get(name).update({
+ "Id": self.have.get("globalPool").get("id")
+ })
+ result_global_pool.get("msg").update({
+ name: "Global pool doesn't require an update"
+ })
+ return
+
+ self.log("Global pool requires update", "DEBUG")
+ # Pool Exists
+ pool_params = copy.deepcopy(self.want.get("wantGlobal"))
+ pool_params_ippool = pool_params.get("settings").get("ippool")[0]
+ pool_params_ippool.update({"id": self.have.get("globalPool").get("id")})
+ self.log("Desired State for global pool (want): {0}".format(pool_params), "DEBUG")
+ keys_to_remove = ["IpAddressSpace", "ipPoolCidr", "type"]
+ for key in keys_to_remove:
+ del pool_params["settings"]["ippool"][0][key]
+
+ have_ippool = self.have.get("globalPool").get("details").get("settings").get("ippool")[0]
+ keys_to_update = ["dhcpServerIps", "dnsServerIps", "gateway"]
+ for key in keys_to_update:
+ if pool_params_ippool.get(key) is None:
+ pool_params_ippool[key] = have_ippool.get(key)
+
+ self.log("Desired global pool details (want): {0}".format(pool_params), "DEBUG")
+ response = self.dnac._exec(
+ family="network_settings",
+ function="update_global_pool",
+ params=pool_params,
+ )
+
+ self.check_execution_response_status(response).check_return_status()
+ self.log("Global pool '{0}' updated successfully".format(name), "INFO")
+ result_global_pool.get("response").get(name) \
+ .update({"Id": self.have.get("globalPool").get("details").get("id")})
+ result_global_pool.get("msg").update({name: "Global Pool Updated Successfully"})
+ return
+
+ def update_reserve_pool(self, config):
+ """
+ Update or Create a Reserve Pool in Cisco DNA Center based on the provided configuration.
+ This method checks if a reserve pool with the specified name exists in Cisco DNA Center.
+ If it exists and requires an update, it updates the pool. If not, it creates a new pool.
+
+ Parameters:
+ config (list of dict) - Playbook details containing Reserve Pool information.
+
+ Returns:
+ None
+ """
+
+ name = config.get("reserve_pool_details").get("name")
+ result_reserve_pool = self.result.get("response")[1].get("reservePool")
+ result_reserve_pool.get("response").update({name: {}})
+ self.log("Current reserved pool details in Catalyst Center: {0}"
+ .format(self.have.get("reservePool").get("details")), "DEBUG")
+ self.log("Desired reserved pool details in Catalyst Center: {0}"
+ .format(self.want.get("wantReserve")), "DEBUG")
+
+ # Check pool exist, if not create and return
+ self.log("IPv4 global pool: {0}"
+ .format(self.want.get("wantReserve").get("ipv4GlobalPool")), "DEBUG")
+ site_name = config.get("reserve_pool_details").get("site_name")
+ reserve_params = self.want.get("wantReserve")
+ site_id = self.get_site_id(site_name)
+ reserve_params.update({"site_id": site_id})
+ if not self.have.get("reservePool").get("exists"):
+ self.log("Desired reserved pool details (want): {0}".format(reserve_params), "DEBUG")
+ response = self.dnac._exec(
+ family="network_settings",
+ function="reserve_ip_subpool",
+ params=reserve_params,
+ )
+ self.check_execution_response_status(response).check_return_status()
+ self.log("Successfully created IP subpool reservation '{0}'.".format(name), "INFO")
+ result_reserve_pool.get("response").get(name) \
+ .update({"reservePool Details": self.want.get("wantReserve")})
+ result_reserve_pool.get("msg") \
+ .update({name: "Ip Subpool Reservation Created Successfully"})
+ return
+
+ # Check update is required
+ if not self.requires_update(self.have.get("reservePool").get("details"),
+ self.want.get("wantReserve"), self.reserve_pool_obj_params):
+ self.log("Reserved ip subpool '{0}' doesn't require an update".format(name), "INFO")
+ result_reserve_pool.get("response").get(name) \
+ .update({"Cisco DNA Center params": self.have.get("reservePool").get("details")})
+ result_reserve_pool.get("response").get(name) \
+ .update({"Id": self.have.get("reservePool").get("id")})
+ result_reserve_pool.get("msg") \
+ .update({name: "Reserve ip subpool doesn't require an update"})
+ return
+
+ self.log("Reserved ip pool '{0}' requires an update".format(name), "DEBUG")
+ # Pool Exists
+ self.log("Current reserved ip pool '{0}' details in Catalyst Center: {1}"
+ .format(name, self.have.get("reservePool")), "DEBUG")
+ self.log("Desired reserved ip pool '{0}' details: {1}"
+ .format(name, self.want.get("wantReserve")), "DEBUG")
+ reserve_params.update({"id": self.have.get("reservePool").get("id")})
+ response = self.dnac._exec(
+ family="network_settings",
+ function="update_reserve_ip_subpool",
+ params=reserve_params,
+ )
+ self.check_execution_response_status(response).check_return_status()
+ self.log("Reserved ip subpool '{0}' updated successfully.".format(name), "INFO")
+ result_reserve_pool['msg'] = "Reserved Ip Subpool Updated Successfully"
+ result_reserve_pool.get("response").get(name) \
+ .update({"Reservation details": self.have.get("reservePool").get("details")})
+ return
+
+ def update_network(self, config):
+ """
+ Update or create a network configuration in Cisco DNA
+ Center based on the provided playbook details.
+
+ Parameters:
+ config (list of dict) - Playbook details containing Network Management information.
+
+ Returns:
+ None
+ """
+
+ site_name = config.get("network_management_details").get("site_name")
+ result_network = self.result.get("response")[2].get("network")
+ result_network.get("response").update({site_name: {}})
+
+ # Check update is required or not
+ if not self.requires_update(self.have.get("network").get("net_details"),
+ self.want.get("wantNetwork"), self.network_obj_params):
+
+ self.log("Network in site '{0}' doesn't require an update.".format(site_name), "INFO")
+ result_network.get("response").get(site_name).update({
+ "Cisco DNA Center params": self.have.get("network")
+ .get("net_details").get("settings")
+ })
+ result_network.get("msg").update({site_name: "Network doesn't require an update"})
+ return
+
+ self.log("Network in site '{0}' requires update.".format(site_name), "INFO")
+ self.log("Current State of network in Catalyst Center: {0}"
+ .format(self.have.get("network")), "DEBUG")
+ self.log("Desired State of network: {0}".format(self.want.get("wantNetwork")), "DEBUG")
+
+ net_params = copy.deepcopy(self.want.get("wantNetwork"))
+ net_params.update({"site_id": self.have.get("network").get("site_id")})
+ response = self.dnac._exec(
+ family="network_settings",
+ function='update_network_v2',
+ params=net_params,
+ )
+ self.log("Received API response of 'update_network_v2': {0}".format(response), "DEBUG")
+ validation_string = "desired common settings operation successful"
+ self.check_task_response_status(response, validation_string).check_return_status()
+ self.log("Network has been changed successfully", "INFO")
+ result_network.get("msg") \
+ .update({site_name: "Network Updated successfully"})
+ result_network.get("response").get(site_name) \
+ .update({"Network Details": self.want.get("wantNetwork").get("settings")})
+ return
+
+ def get_diff_merged(self, config):
+ """
+ Update or create Global Pool, Reserve Pool, and
+ Network configurations in Cisco DNA Center based on the playbook details
+
+ Parameters:
+ config (list of dict) - Playbook details containing
+ Global Pool, Reserve Pool, and Network Management information.
+
+ Returns:
+ self
+ """
+
+ if config.get("global_pool_details") is not None:
+ self.update_global_pool(config)
+
+ if config.get("reserve_pool_details") is not None:
+ self.update_reserve_pool(config)
+
+ if config.get("network_management_details") is not None:
+ self.update_network(config)
+
+ return self
+
+ def delete_reserve_pool(self, name):
+ """
+ Delete a Reserve Pool by name in Cisco DNA Center
+
+ Parameters:
+ name (str) - The name of the Reserve Pool to be deleted.
+
+ Returns:
+ self
+ """
+
+ reserve_pool_exists = self.have.get("reservePool").get("exists")
+ result_reserve_pool = self.result.get("response")[1].get("reservePool")
+
+ if not reserve_pool_exists:
+ result_reserve_pool.get("response").update({name: "Reserve Pool not found"})
+ self.msg = "Reserved Ip Subpool Not Found"
+ self.status = "success"
+ return self
+
+ self.log("Reserved IP pool scheduled for deletion: {0}"
+ .format(self.have.get("reservePool").get("name")), "INFO")
+ _id = self.have.get("reservePool").get("id")
+ self.log("Reserved pool {0} id: {1}".format(name, _id), "DEBUG")
+ response = self.dnac._exec(
+ family="network_settings",
+ function="release_reserve_ip_subpool",
+ params={"id": _id},
+ )
+ self.check_execution_response_status(response).check_return_status()
+ executionid = response.get("executionId")
+ result_reserve_pool = self.result.get("response")[1].get("reservePool")
+ result_reserve_pool.get("response").update({name: {}})
+ result_reserve_pool.get("response").get(name) \
+ .update({"Execution Id": executionid})
+ result_reserve_pool.get("msg") \
+ .update({name: "Ip subpool reservation released successfully"})
+ self.msg = "Reserved pool - {0} released successfully".format(name)
+ self.status = "success"
+ return self
+
+ def delete_global_pool(self, name):
+ """
+ Delete a Global Pool by name in Cisco DNA Center
+
+ Parameters:
+ name (str) - The name of the Global Pool to be deleted.
+
+ Returns:
+ self
+ """
+
+ global_pool_exists = self.have.get("globalPool").get("exists")
+ result_global_pool = self.result.get("response")[0].get("globalPool")
+ if not global_pool_exists:
+ result_global_pool.get("response").update({name: "Global Pool not found"})
+ self.msg = "Global pool Not Found"
+ self.status = "success"
+ return self
+
+ response = self.dnac._exec(
+ family="network_settings",
+ function="delete_global_ip_pool",
+ params={"id": self.have.get("globalPool").get("id")},
+ )
+
+ # Check the execution status
+ self.check_execution_response_status(response).check_return_status()
+ executionid = response.get("executionId")
+
+ # Update result information
+ result_global_pool = self.result.get("response")[0].get("globalPool")
+ result_global_pool.get("response").update({name: {}})
+ result_global_pool.get("response").get(name).update({"Execution Id": executionid})
+ result_global_pool.get("msg").update({name: "Pool deleted successfully"})
+ self.msg = "Global pool - {0} deleted successfully".format(name)
+ self.status = "success"
+ return self
+
+ def get_diff_deleted(self, config):
+ """
+ Delete Reserve Pool and Global Pool in Cisco DNA Center based on playbook details.
+
+ Parameters:
+ config (list of dict) - Playbook details
+
+ Returns:
+ self
+ """
+
+ if config.get("reserve_pool_details") is not None:
+ name = config.get("reserve_pool_details").get("name")
+ self.delete_reserve_pool(name).check_return_status()
+
+ if config.get("global_pool_details") is not None:
+ name = config.get("global_pool_details") \
+ .get("settings").get("ip_pool")[0].get("name")
+ self.delete_global_pool(name).check_return_status()
+
+ return self
+
+ def verify_diff_merged(self, config):
+ """
+ Validating the DNAC configuration with the playbook details
+ when state is merged (Create/Update).
+
+ Parameters:
+ config (dict) - Playbook details containing Global Pool,
+ Reserved Pool, and Network Management configuration.
+
+ Returns:
+ self
+ """
+
+ self.get_have(config)
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.log("Requested State (want): {0}".format(self.want), "INFO")
+ if config.get("global_pool_details") is not None:
+ self.log("Desired State of global pool (want): {0}"
+ .format(self.want.get("wantGlobal")), "DEBUG")
+ self.log("Current State of global pool (have): {0}"
+ .format(self.have.get("globalPool").get("details")), "DEBUG")
+ if self.requires_update(self.have.get("globalPool").get("details"),
+ self.want.get("wantGlobal"), self.global_pool_obj_params):
+ self.msg = "Global Pool Config is not applied to the DNAC"
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated global pool '{0}'.".format(self.want
+ .get("wantGlobal").get("settings").get("ippool")[0].get("ipPoolName")), "INFO")
+ self.result.get("response")[0].get("globalPool").update({"Validation": "Success"})
+
+ if config.get("reserve_pool_details") is not None:
+ if self.requires_update(self.have.get("reservePool").get("details"),
+ self.want.get("wantReserve"), self.reserve_pool_obj_params):
+ self.log("Desired State for reserve pool (want): {0}"
+ .format(self.want.get("wantReserve")), "DEBUG")
+ self.log("Current State for reserve pool (have): {0}"
+ .format(self.have.get("reservePool").get("details")), "DEBUG")
+ self.msg = "Reserved Pool Config is not applied to the DNAC"
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated the reserved pool '{0}'."
+ .format(self.want.get("wantReserve").get("name")), "INFO")
+ self.result.get("response")[1].get("reservePool").update({"Validation": "Success"})
+
+ if config.get("network_management_details") is not None:
+ if self.requires_update(self.have.get("network").get("net_details"),
+ self.want.get("wantNetwork"), self.network_obj_params):
+ self.msg = "Network Functions Config is not applied to the DNAC"
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated the network functions '{0}'."
+ .format(config.get("network_management_details").get("site_name")), "INFO")
+ self.result.get("response")[2].get("network").update({"Validation": "Success"})
+
+ self.msg = "Successfully validated the Global Pool, Reserve Pool \
+ and the Network Functions."
+ self.status = "success"
+ return self
+
+ def verify_diff_deleted(self, config):
+ """
+ Validating the DNAC configuration with the playbook details
+ when state is deleted (delete).
+
+ Parameters:
+ config (dict) - Playbook details containing Global Pool,
+ Reserved Pool, and Network Management configuration.
+
+ Returns:
+ self
+ """
+
+ self.get_have(config)
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.log("Desired State (want): {0}".format(self.want), "INFO")
+ if config.get("global_pool_details") is not None:
+ global_pool_exists = self.have.get("globalPool").get("exists")
+ if global_pool_exists:
+ self.msg = "Global Pool Config is not applied to the DNAC"
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated absence of Global Pool '{0}'."
+ .format(config.get("global_pool_details")
+ .get("settings").get("ip_pool")[0].get("name")), "INFO")
+ self.result.get("response")[0].get("globalPool").update({"Validation": "Success"})
+
+ if config.get("reserve_pool_details") is not None:
+ reserve_pool_exists = self.have.get("reservePool").get("exists")
+ if reserve_pool_exists:
+ self.msg = "Reserved Pool Config is not applied to the Catalyst Center"
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated the absence of Reserve Pool '{0}'."
+ .format(config.get("reserve_pool_details").get("name")), "INFO")
+ self.result.get("response")[1].get("reservePool").update({"Validation": "Success"})
+
+ self.msg = "Successfully validated the absence of Global Pool/Reserve Pool"
+ self.status = "success"
+ return self
+
+ def reset_values(self):
+ """
+ Reset all neccessary attributes to default values
+
+ Parameters:
+ None
+
+ Returns:
+ None
+ """
+
+ self.have.clear()
+ self.want.clear()
+ return
+
+
def main():
    """main entry point for module execution

    Builds the AnsibleModule, validates the playbook input, then applies
    (and optionally verifies) the requested state for each config entry.
    """

    # Define the specification for module arguments
    element_spec = {
        "dnac_host": {"type": 'str', "required": True},
        "dnac_port": {"type": 'str', "default": '443'},
        "dnac_username": {"type": 'str', "default": 'admin', "aliases": ['user']},
        "dnac_password": {"type": 'str', "no_log": True},
        # Fixed: the default was the string 'True'; use a real boolean for
        # consistency with the other bool options (Ansible coerced it anyway).
        "dnac_verify": {"type": 'bool', "default": True},
        "dnac_version": {"type": 'str', "default": '2.2.3.3'},
        "dnac_debug": {"type": 'bool', "default": False},
        "dnac_log": {"type": 'bool', "default": False},
        "dnac_log_level": {"type": 'str', "default": 'WARNING'},
        "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
        "dnac_log_append": {"type": 'bool', "default": True},
        "config_verify": {"type": 'bool', "default": False},
        "dnac_api_task_timeout": {"type": 'int', "default": 1200},
        "dnac_task_poll_interval": {"type": 'int', "default": 2},
        "config": {"type": 'list', "required": True, "elements": 'dict'},
        "state": {"default": 'merged', "choices": ['merged', 'deleted']},
        "validate_response_schema": {"type": 'bool', "default": True},
    }

    # Create an AnsibleModule object with argument specifications
    module = AnsibleModule(argument_spec=element_spec, supports_check_mode=False)
    dnac_network = NetworkSettings(module)
    state = dnac_network.params.get("state")
    config_verify = dnac_network.params.get("config_verify")
    if state not in dnac_network.supported_states:
        dnac_network.status = "invalid"
        dnac_network.msg = "State {0} is invalid".format(state)
        dnac_network.check_return_status()

    dnac_network.validate_input().check_return_status()

    for config in dnac_network.config:
        # Each entry is processed independently; drop state from the last one.
        dnac_network.reset_values()
        dnac_network.get_have(config).check_return_status()
        if state != "deleted":
            dnac_network.get_want(config).check_return_status()
        dnac_network.get_diff_state_apply[state](config).check_return_status()
        if config_verify:
            dnac_network.verify_diff_state_apply[state](config).check_return_status()

    module.exit_json(**dnac_network.result)
+
+
# Run only when executed directly; Ansible invokes main() this way.
if __name__ == "__main__":
    main()
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_settings_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/network_settings_workflow_manager.py
new file mode 100644
index 000000000..bbae36463
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_settings_workflow_manager.py
@@ -0,0 +1,2210 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+"""Ansible module to perform operations on global pool, reserve pool and network in Cisco Catalyst Center."""
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ['Muthu Rakesh, Madhan Sankaranarayanan']
+
+DOCUMENTATION = r"""
+---
+module: network_settings_workflow_manager
+short_description: Resource module for IP Address pools and network functions
+description:
+- Manage operations on Global Pool, Reserve Pool, Network resources.
+- API to create/update/delete global pool.
+- API to reserve/update/delete an ip subpool from the global pool.
+- API to update network settings for DHCP, Syslog, SNMP, NTP, Network AAA, Client and Endpoint AAA,
+ and/or DNS center server settings.
+version_added: '6.6.0'
+extends_documentation_fragment:
+ - cisco.dnac.workflow_manager_params
+author: Muthu Rakesh (@MUTHU-RAKESH-27)
+ Madhan Sankaranarayanan (@madhansansel)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Cisco Catalyst Center after module completion.
+ type: str
+ choices: [ merged, deleted ]
+ default: merged
+ config:
+ description:
+ - List of details of global pool, reserved pool, network being managed.
+ type: list
+ elements: dict
+ required: true
+ suboptions:
+ global_pool_details:
+ description: Manages IPv4 and IPv6 IP pools in the global level.
+ type: dict
+ suboptions:
+ settings:
+ description: Global Pool's settings.
+ type: dict
+ suboptions:
+ ip_pool:
+ description: Contains a list of global IP pool configurations.
+ elements: dict
+ type: list
+ suboptions:
+ dhcp_server_ips:
+ description: >
+ The DHCP server IPs responsible for automatically assigning IP addresses
+ and network configuration parameters to devices on a local network.
+ elements: str
+ type: list
+ dns_server_ips:
+ description: Responsible for translating domain names into corresponding IP addresses.
+ elements: str
+ type: list
+ gateway:
+ description: Serves as an entry or exit point for data traffic between networks.
+ type: str
+ ip_address_space:
+ description: IP address space either IPv4 or IPv6.
+ type: str
+ cidr:
+ description: >
+ Defines the IP pool's Classless Inter-Domain Routing block,
+ enabling systematic IP address distribution within a network.
+ type: str
+ prev_name:
+ description: >
+ The former identifier for the global pool. It should be used
+ exclusively when you need to update the global pool's name.
+ type: str
+ name:
+ description: Specifies the name assigned to the Global IP Pool.
+ type: str
+ pool_type:
+ description: >
+ Includes both the Generic Ip Pool and Tunnel Ip Pool.
+ Generic - Used for general purpose within the network such as device
+ management or communication between the network devices.
+ Tunnel - Designated for the tunnel interfaces to encapsulate packets
+ within the network protocol. It is used in VPN connections,
+ GRE tunnels, or other types of overlay networks.
+ default: Generic
+ choices: [Generic, Tunnel]
+ type: str
+
+ reserve_pool_details:
+ description: Reserved IP subpool details from the global pool.
+ type: dict
+ suboptions:
+ ipv4_dhcp_servers:
+ description: Specifies the IPv4 addresses for DHCP servers, for example, "1.1.1.1".
+ elements: str
+ type: list
+ ipv4_dns_servers:
+ description: Specifies the IPv4 addresses for DNS servers, for example, "4.4.4.4".
+ elements: str
+ type: list
+ ipv4_gateway:
+ description: Provides the gateway's IPv4 address, for example, "175.175.0.1".
+ type: str
+ version_added: 4.0.0
+ ipv4_global_pool:
+ description: IP v4 Global pool address with cidr, example 175.175.0.0/16.
+ type: str
+ ipv4_prefix:
+ description: ip4 prefix length is enabled or ipv4 total Host input is enabled
+ type: bool
+ ipv4_prefix_length:
+ description: The ipv4 prefix length is required when ipv4_prefix value is true.
+ type: int
+ ipv4_subnet:
+ description: Indicates the IPv4 subnet address, for example, "175.175.0.0".
+ type: str
+ ipv4_total_host:
+ description: The total number of hosts for IPv4, required when the 'ipv4_prefix' is set to false.
+ type: int
+ ipv6_address_space:
+ description: >
+ Determines whether both IPv6 and IPv4 inputs are required.
+ If set to false, only IPv4 inputs are required.
+ If set to true, both IPv6 and IPv4 inputs are required.
+ type: bool
+ ipv6_dhcp_servers:
+ description: >
+ Specifies the IPv6 addresses for DHCP servers in the format.
+ For example, "2001:0db8:0123:4567:89ab:cdef:0001:0001".
+ elements: str
+ type: list
+ ipv6_dns_servers:
+ description: >
+ Specifies the IPv6 addresses for DNS servers.
+ For example, "2001:0db8:0123:4567:89ab:cdef:0002:0002".
+ elements: str
+ type: list
+ ipv6_gateway:
+ description: >
+ Provides the gateway's IPv6 address.
+ For example, "2001:0db8:0123:4567:89ab:cdef:0003:0003".
+ type: str
+ ipv6_global_pool:
+ description: >
+ IPv6 Global pool address with cidr this is required when ipv6_address_space
+ value is true, example 2001 db8 85a3 /64.
+ type: str
+ ipv6_prefix:
+ description: >
+ Ipv6 prefix value is true, the ip6 prefix length input field is enabled,
+ if it is false ipv6 total Host input is enable.
+ type: bool
+ ipv6_prefix_length:
+ description: IPv6 prefix length is required when the ipv6_prefix value is true.
+ type: int
+ ipv6_subnet:
+ description: IPv6 Subnet address, example 2001 db8 85a3 0 100.
+ type: str
+ ipv6_total_host:
+ description: The total number of hosts for IPv6 is required if the 'ipv6_prefix' is set to false.
+ type: int
+ name:
+ description: Name of the reserve IP subpool.
+ type: str
+ prev_name:
+ description: The former name associated with the reserved IP sub-pool.
+ type: str
+ site_name:
+ description: >
+ The name of the site provided as a path parameter, used
+ to specify where the IP sub-pool will be reserved.
+ type: str
+ slaac_support:
+ description: >
+ Allows devices on IPv6 networks to self-configure their
+ IP addresses autonomously, eliminating the need for manual setup.
+ type: bool
+ pool_type:
+ description: Type of the reserve ip sub pool.
+ Generic - Used for general purpose within the network such as device
+ management or communication between the network devices.
+ LAN - Used for the devices and the resources within the Local Area Network
+ such as device connectivity, internal communication, or services.
+ Management - Used for the management purposes such as device management interfaces,
+ management access, or other administrative functions.
+ Service - Used for the network services and application such as DNS (Domain Name System),
+ DHCP (Dynamic Host Configuration Protocol), NTP (Network Time Protocol).
+ WAN - Used for the devices and resources with the Wide Area Network such as remote
+ sites interconnection with other network or services hosted within WAN.
+ default: Generic
+ choices: [Generic, LAN, Management, Service, WAN]
+ type: str
+ network_management_details:
+ description: Set default network settings for the site
+ type: dict
+ suboptions:
+ settings:
+ description: Network management details settings.
+ type: dict
+ suboptions:
+ client_and_endpoint_aaa:
+ description: Network V2's clientAndEndpoint_aaa.
+ suboptions:
+ ip_address:
+ description: IP address for ISE server (eg 1.1.1.4).
+ type: str
+ network:
+ description: IP address for AAA or ISE server (eg 2.2.2.1).
+ type: str
+ protocol:
+ description: Protocol for AAA or ISE server (eg RADIUS).
+ type: str
+ servers:
+ description: Server type AAA or ISE server (eg AAA).
+ type: str
+ shared_secret:
+ description: Shared secret for ISE server.
+ type: str
+ type: dict
+ dhcp_server:
+ description: DHCP Server IP (eg 1.1.1.1).
+ elements: str
+ type: list
+ dns_server:
+ description: Network V2's dnsServer.
+ suboptions:
+ domain_name:
+ description: Domain Name of DHCP (eg; cisco).
+ type: str
+ primary_ip_address:
+ description: Primary IP Address for DHCP (eg 2.2.2.2).
+ type: str
+ secondary_ip_address:
+ description: Secondary IP Address for DHCP (eg 3.3.3.3).
+ type: str
+ type: dict
+ message_of_the_day:
+ description: Network V2's messageOfTheday.
+ suboptions:
+ banner_message:
+ description: Message for Banner message (eg; Good day).
+ type: str
+ retain_existing_banner:
+ description: Retain existing Banner Message (eg "true" or "false").
+ type: str
+ type: dict
+ netflow_collector:
+ description: Network V2's netflowcollector.
+ suboptions:
+ ip_address:
+ description: IP Address for NetFlow collector (eg 3.3.3.1).
+ type: str
+ port:
+ description: Port for NetFlow Collector (eg; 443).
+ type: int
+ type: dict
+ network_aaa:
+ description: Network V2's network_aaa.
+ suboptions:
+ ip_address:
+ description: IP address for AAA and ISE server (eg 1.1.1.1).
+ type: str
+ network:
+ description: IP Address for AAA or ISE server (eg 2.2.2.2).
+ type: str
+ protocol:
+ description: Protocol for AAA or ISE server (eg RADIUS).
+ type: str
+ servers:
+ description: Server type for AAA Network (eg AAA).
+ type: str
+ shared_secret:
+ description: Shared secret for ISE Server.
+ type: str
+ type: dict
+ ntp_server:
+ description: IP address for NTP server (eg 1.1.1.2).
+ elements: str
+ type: list
+ snmp_server:
+ description: Network V2's snmpServer.
+ suboptions:
+ configure_dnac_ip:
+ description: Configuration Cisco Catalyst Center IP for SNMP Server (eg true).
+ type: bool
+ ip_addresses:
+ description: IP Address for SNMP Server (eg 4.4.4.1).
+ elements: str
+ type: list
+ type: dict
+ syslog_server:
+ description: Network V2's syslogServer.
+ suboptions:
+ configure_dnac_ip:
+ description: Configuration Cisco Catalyst Center IP for syslog server (eg true).
+ type: bool
+ ip_addresses:
+ description: IP Address for syslog server (eg 4.4.4.4).
+ elements: str
+ type: list
+ type: dict
+ timezone:
+ description: Input for time zone (eg Africa/Abidjan).
+ type: str
+ site_name:
+ description: >
+ The name of the site provided as a path parameter, used
+ to specify where the IP sub-pool will be reserved.
+ type: str
+requirements:
+- dnacentersdk == 2.4.5
+- python >= 3.5
+notes:
+ - SDK Method used are
+ network_settings.NetworkSettings.create_global_pool,
+ network_settings.NetworkSettings.delete_global_ip_pool,
+ network_settings.NetworkSettings.update_global_pool,
+ network_settings.NetworkSettings.release_reserve_ip_subpool,
+ network_settings.NetworkSettings.reserve_ip_subpool,
+ network_settings.NetworkSettings.update_reserve_ip_subpool,
+ network_settings.NetworkSettings.update_network_v2,
+
+ - Paths used are
+ post /dna/intent/api/v1/global-pool,
+ delete /dna/intent/api/v1/global-pool/{id},
+ put /dna/intent/api/v1/global-pool,
+ post /dna/intent/api/v1/reserve-ip-subpool/{siteId},
+ delete /dna/intent/api/v1/reserve-ip-subpool/{id},
+ put /dna/intent/api/v1/reserve-ip-subpool/{siteId},
+ put /dna/intent/api/v2/network/{siteId},
+
+"""
+
+EXAMPLES = r"""
+- name: Create global pool, reserve an ip pool and network
+ cisco.dnac.network_settings_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - global_pool_details:
+ settings:
+ ip_pool:
+ - name: string
+ gateway: string
+ ip_address_space: string
+ cidr: string
+ pool_type: Generic
+ dhcp_server_ips: list
+ dns_server_ips: list
+ reserve_pool_details:
+ ipv6_address_space: True
+ ipv4_global_pool: string
+ ipv4_prefix: True
+ ipv4_prefix_length: 9
+ ipv4_subnet: string
+ name: string
+ ipv6_prefix: True
+ ipv6_prefix_length: 64
+ ipv6_global_pool: string
+ ipv6_subnet: string
+ site_name: string
+ slaac_support: True
+ pool_type: LAN
+ network_management_details:
+ settings:
+ dhcp_server: list
+ dns_server:
+ domain_name: string
+ primary_ip_address: string
+ secondary_ip_address: string
+ client_and_endpoint_aaa:
+ network: string
+ protocol: string
+ servers: string
+ message_of_the_day:
+ banner_message: string
+ retain_existing_banner: string
+ netflow_collector:
+ ip_address: string
+ port: 443
+ network_aaa:
+ network: string
+ protocol: string
+ servers: string
+ ntp_server: list
+ snmp_server:
+ configure_dnac_ip: True
+ ip_addresses: list
+ syslog_server:
+ configure_dnac_ip: True
+ ip_addresses: list
+ site_name: string
+"""
+
+RETURN = r"""
+# Case_1: Successful creation/updation/deletion of global pool
+response_1:
+ description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "executionId": "string",
+ "executionStatusUrl": "string",
+ "message": "string"
+ }
+
+# Case_2: Successful creation/updation/deletion of reserve pool
+response_2:
+ description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "executionId": "string",
+ "executionStatusUrl": "string",
+ "message": "string"
+ }
+
+# Case_3: Successful creation/updation of network
+response_3:
+ description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "executionId": "string",
+ "executionStatusUrl": "string",
+ "message": "string"
+ }
+"""
+
+import copy
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts,
+ get_dict_result,
+ dnac_compare_equality,
+)
+
+
+class NetworkSettings(DnacBase):
+ """Class containing member attributes for network_settings_workflow_manager module"""
+
+ def __init__(self, module):
+ super().__init__(module)
+ self.result["response"] = [
+ {"globalPool": {"response": {}, "msg": {}}},
+ {"reservePool": {"response": {}, "msg": {}}},
+ {"network": {"response": {}, "msg": {}}}
+ ]
+ self.global_pool_obj_params = self.get_obj_params("GlobalPool")
+ self.reserve_pool_obj_params = self.get_obj_params("ReservePool")
+ self.network_obj_params = self.get_obj_params("Network")
+
    def validate_input(self):
        """
        Checks if the configuration parameters provided in the playbook
        meet the expected structure and data types,
        as defined in the 'temp_spec' dictionary.

        Parameters:
            None

        Returns:
            self

        """

        # An empty 'config' is not treated as an error here; downstream steps
        # decide what to do with nothing to process.
        if not self.config:
            self.msg = "config not available in playbook for validation"
            self.status = "success"
            return self

        # temp_spec is the specification for the expected structure of configuration parameters
        temp_spec = {
            # Global IPv4/IPv6 pools managed at the global level.
            "global_pool_details": {
                "type": 'dict',
                "settings": {
                    "type": 'dict',
                    "ip_pool": {
                        "type": 'list',
                        "ip_address_space": {"type": 'string'},
                        "dhcp_server_ips": {"type": 'list'},
                        "dns_server_ips": {"type": 'list'},
                        "gateway": {"type": 'string'},
                        "cidr": {"type": 'string'},
                        "name": {"type": 'string'},
                        "prev_name": {"type": 'string'},
                        "pool_type": {"type": 'string', "choices": ["Generic", "Tunnel"]},
                    }
                }
            },
            # Subpools reserved from a global pool for a specific site.
            "reserve_pool_details": {
                "type": 'dict',
                "name": {"type": 'string'},
                "prev_name": {"type": 'string'},
                "ipv6_address_space": {"type": 'bool'},
                "ipv4_global_pool": {"type": 'string'},
                "ipv4_prefix": {"type": 'bool'},
                "ipv4_prefix_length": {"type": 'string'},
                "ipv4_subnet": {"type": 'string'},
                "ipv4_gateway": {"type": 'string'},
                "ipv4_dhcp_servers": {"type": 'list'},
                "ipv4_dns_servers": {"type": 'list'},
                "ipv6_global_pool": {"type": 'string'},
                "ipv6_prefix": {"type": 'bool'},
                "ipv6_prefix_length": {"type": 'integer'},
                "ipv6_subnet": {"type": 'string'},
                "ipv6_gateway": {"type": 'string'},
                "ipv6_dhcp_servers": {"type": 'list'},
                "ipv6_dns_servers": {"type": 'list'},
                "ipv4_total_host": {"type": 'integer'},
                "ipv6_total_host": {"type": 'integer'},
                "slaac_support": {"type": 'bool'},
                "site_name": {"type": 'string'},
                "pool_type": {
                    "type": 'string',
                    "choices": ["Generic", "LAN", "Management", "Service", "WAN"]
                },
            },
            # Per-site default network services (DHCP/DNS/SNMP/NTP/AAA/...).
            "network_management_details": {
                "type": 'dict',
                "settings": {
                    "type": 'dict',
                    "dhcp_server": {"type": 'list'},
                    "dns_server": {
                        "type": 'dict',
                        "domain_name": {"type": 'string'},
                        "primary_ip_address": {"type": 'string'},
                        "secondary_ip_address": {"type": 'string'}
                    },
                    "syslog_server": {
                        "type": 'dict',
                        "ip_addresses": {"type": 'list'},
                        "configure_dnac_ip": {"type": 'bool'}
                    },
                    "snmp_server": {
                        "type": 'dict',
                        "ip_addresses": {"type": 'list'},
                        "configure_dnac_ip": {"type": 'bool'}
                    },
                    "netflow_collector": {
                        "type": 'dict',
                        "ip_address": {"type": 'string'},
                        "port": {"type": 'integer'},
                    },
                    "timezone": {"type": 'string'},
                    "ntp_server": {"type": 'list'},
                    "message_of_the_day": {
                        "type": 'dict',
                        "banner_message": {"type": 'string'},
                        "retain_existing_banner": {"type": 'bool'},
                    },
                    "network_aaa": {
                        "type": 'dict',
                        "servers": {"type": 'string', "choices": ["ISE", "AAA"]},
                        "ip_address": {"type": 'string'},
                        "network": {"type": 'string'},
                        "protocol": {"type": 'string', "choices": ["RADIUS", "TACACS"]},
                        "shared_secret": {"type": 'string'}

                    },
                    "client_and_endpoint_aaa": {
                        "type": 'dict',
                        "servers": {"type": 'string', "choices": ["ISE", "AAA"]},
                        "ip_address": {"type": 'string'},
                        "network": {"type": 'string'},
                        "protocol": {"type": 'string', "choices": ["RADIUS", "TACACS"]},
                        "shared_secret": {"type": 'string'}
                    }
                },
                "site_name": {"type": 'string'},
            }
        }

        # Validate playbook params against the specification (temp_spec)
        valid_temp, invalid_params = validate_list_of_dicts(self.config, temp_spec)
        if invalid_params:
            self.msg = "Invalid parameters in playbook: {0}".format("\n".join(invalid_params))
            self.status = "failed"
            return self

        self.validated_config = valid_temp
        self.log("Successfully validated playbook config params: {0}".format(valid_temp), "INFO")
        self.msg = "Successfully validated input from the playbook"
        self.status = "success"
        return self
+
+ def requires_update(self, have, want, obj_params):
+ """
+ Check if the template config given requires update by comparing
+ current information wih the requested information.
+
+ This method compares the current global pool, reserve pool,
+ or network details from Cisco Catalyst Center with the user-provided details
+ from the playbook, using a specified schema for comparison.
+
+ Parameters:
+ have (dict) - Current information from the Cisco Catalyst Center
+ (global pool, reserve pool, network details)
+ want (dict) - Users provided information from the playbook
+ obj_params (list of tuples) - A list of parameter mappings specifying which
+ Cisco Catalyst Center parameters (dnac_param) correspond to
+ the user-provided parameters (ansible_param).
+
+ Returns:
+ bool - True if any parameter specified in obj_params differs between
+ current_obj and requested_obj, indicating that an update is required.
+ False if all specified parameters are equal.
+
+ """
+
+ current_obj = have
+ requested_obj = want
+ self.log("Current State (have): {0}".format(current_obj), "DEBUG")
+ self.log("Desired State (want): {0}".format(requested_obj), "DEBUG")
+
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def get_obj_params(self, get_object):
+ """
+ Get the required comparison obj_params value
+
+ Parameters:
+ get_object (str) - identifier for the required obj_params
+
+ Returns:
+ obj_params (list) - obj_params value for comparison.
+ """
+
+ try:
+ if get_object == "GlobalPool":
+ obj_params = [
+ ("settings", "settings"),
+ ]
+ elif get_object == "ReservePool":
+ obj_params = [
+ ("name", "name"),
+ ("type", "type"),
+ ("ipv6AddressSpace", "ipv6AddressSpace"),
+ ("ipv4GlobalPool", "ipv4GlobalPool"),
+ ("ipv4Prefix", "ipv4Prefix"),
+ ("ipv4PrefixLength", "ipv4PrefixLength"),
+ ("ipv4GateWay", "ipv4GateWay"),
+ ("ipv4DhcpServers", "ipv4DhcpServers"),
+ ("ipv4DnsServers", "ipv4DnsServers"),
+ ("ipv6GateWay", "ipv6GateWay"),
+ ("ipv6DhcpServers", "ipv6DhcpServers"),
+ ("ipv6DnsServers", "ipv6DnsServers"),
+ ("ipv4TotalHost", "ipv4TotalHost"),
+ ("slaacSupport", "slaacSupport")
+ ]
+ elif get_object == "Network":
+ obj_params = [
+ ("settings", "settings"),
+ ("site_name", "site_name")
+ ]
+ else:
+ raise ValueError("Received an unexpected value for 'get_object': {0}"
+ .format(get_object))
+ except Exception as msg:
+ self.log("Received exception: {0}".format(msg), "CRITICAL")
+
+ return obj_params
+
+ def get_site_id(self, site_name):
+ """
+ Get the site id from the site name.
+ Use check_return_status() to check for failure
+
+ Parameters:
+ site_name (str) - Site name
+
+ Returns:
+ str or None - The Site Id if found, or None if not found or error
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="sites",
+ function='get_site',
+ params={"name": site_name},
+ )
+ self.log("Received API response from 'get_site': {0}".format(response), "DEBUG")
+ if not response:
+ self.log("Failed to retrieve the site ID for the site name: {0}"
+ .format(site_name), "ERROR")
+ return None
+
+ _id = response.get("response")[0].get("id")
+ self.log("Site ID for site name '{0}': {1}".format(site_name, _id), "DEBUG")
+ except Exception as msg:
+ self.log("Exception occurred while retrieving site_id from the site_name: {0}"
+ .format(msg), "CRITICAL")
+ return None
+
+ return _id
+
+ def get_global_pool_params(self, pool_info):
+ """
+ Process Global Pool params from playbook data for Global Pool config in Cisco Catalyst Center
+
+ Parameters:
+ pool_info (dict) - Playbook data containing information about the global pool
+
+ Returns:
+ dict or None - Processed Global Pool data in a format suitable
+ for Cisco Catalyst Center configuration, or None if pool_info is empty.
+ """
+
+ if not pool_info:
+ self.log("Global Pool is empty", "INFO")
+ return None
+
+ self.log("Global Pool Details: {0}".format(pool_info), "DEBUG")
+ global_pool = {
+ "settings": {
+ "ippool": [{
+ "dhcpServerIps": pool_info.get("dhcpServerIps"),
+ "dnsServerIps": pool_info.get("dnsServerIps"),
+ "ipPoolCidr": pool_info.get("ipPoolCidr"),
+ "ipPoolName": pool_info.get("ipPoolName"),
+ "type": pool_info.get("ipPoolType").capitalize()
+ }]
+ }
+ }
+ self.log("Formated global pool details: {0}".format(global_pool), "DEBUG")
+ global_ippool = global_pool.get("settings").get("ippool")[0]
+ if pool_info.get("ipv6") is False:
+ global_ippool.update({"IpAddressSpace": "IPv4"})
+ else:
+ global_ippool.update({"IpAddressSpace": "IPv6"})
+
+ self.log("ip_address_space: {0}".format(global_ippool.get("IpAddressSpace")), "DEBUG")
+ if not pool_info["gateways"]:
+ global_ippool.update({"gateway": ""})
+ else:
+ global_ippool.update({"gateway": pool_info.get("gateways")[0]})
+
+ return global_pool
+
+ def get_reserve_pool_params(self, pool_info):
+ """
+ Process Reserved Pool parameters from playbook data
+ for Reserved Pool configuration in Cisco Catalyst Center
+
+ Parameters:
+ pool_info (dict) - Playbook data containing information about the reserved pool
+
+ Returns:
+ reserve_pool (dict) - Processed Reserved pool data
+ in the format suitable for the Cisco Catalyst Center config
+ """
+
+ reserve_pool = {
+ "name": pool_info.get("groupName"),
+ "site_id": pool_info.get("siteId"),
+ }
+ if len(pool_info.get("ipPools")) == 1:
+ reserve_pool.update({
+ "ipv4DhcpServers": pool_info.get("ipPools")[0].get("dhcpServerIps"),
+ "ipv4DnsServers": pool_info.get("ipPools")[0].get("dnsServerIps"),
+ "ipv6AddressSpace": "False"
+ })
+ if pool_info.get("ipPools")[0].get("gateways") != []:
+ reserve_pool.update({"ipv4GateWay": pool_info.get("ipPools")[0].get("gateways")[0]})
+ else:
+ reserve_pool.update({"ipv4GateWay": ""})
+ reserve_pool.update({"ipv6AddressSpace": "False"})
+ elif len(pool_info.get("ipPools")) == 2:
+ if not pool_info.get("ipPools")[0].get("ipv6"):
+ reserve_pool.update({
+ "ipv4DhcpServers": pool_info.get("ipPools")[0].get("dhcpServerIps"),
+ "ipv4DnsServers": pool_info.get("ipPools")[0].get("dnsServerIps"),
+ "ipv6AddressSpace": "True",
+ "ipv6DhcpServers": pool_info.get("ipPools")[1].get("dhcpServerIps"),
+ "ipv6DnsServers": pool_info.get("ipPools")[1].get("dnsServerIps"),
+
+ })
+
+ if pool_info.get("ipPools")[0].get("gateways") != []:
+ reserve_pool.update({"ipv4GateWay":
+ pool_info.get("ipPools")[0].get("gateways")[0]})
+ else:
+ reserve_pool.update({"ipv4GateWay": ""})
+
+ if pool_info.get("ipPools")[1].get("gateways") != []:
+ reserve_pool.update({"ipv6GateWay":
+ pool_info.get("ipPools")[1].get("gateways")[0]})
+ else:
+ reserve_pool.update({"ipv6GateWay": ""})
+
+ elif not pool_info.get("ipPools")[1].get("ipv6"):
+ reserve_pool.update({
+ "ipv4DhcpServers": pool_info.get("ipPools")[1].get("dhcpServerIps"),
+ "ipv4DnsServers": pool_info.get("ipPools")[1].get("dnsServerIps"),
+ "ipv6AddressSpace": "True",
+ "ipv6DnsServers": pool_info.get("ipPools")[0].get("dnsServerIps"),
+ "ipv6DhcpServers": pool_info.get("ipPools")[0].get("dhcpServerIps")
+ })
+ if pool_info.get("ipPools")[1].get("gateways") != []:
+ reserve_pool.update({"ipv4GateWay":
+ pool_info.get("ipPools")[1].get("gateways")[0]})
+ else:
+ reserve_pool.update({"ipv4GateWay": ""})
+
+ if pool_info.get("ipPools")[0].get("gateways") != []:
+ reserve_pool.update({"ipv6GateWay":
+ pool_info.get("ipPools")[0].get("gateways")[0]})
+ else:
+ reserve_pool.update({"ipv6GateWay": ""})
+ reserve_pool.update({"slaacSupport": True})
+ self.log("Formatted reserve pool details: {0}".format(reserve_pool), "DEBUG")
+ return reserve_pool
+
    def get_network_params(self, site_id):
        """
        Process the Network parameters from the playbook
        for Network configuration in Cisco Catalyst Center

        Queries 'get_network_v2' for the given site and reshapes the returned
        list of key/value records into the nested 'settings' structure this
        module uses for comparison and updates.

        Parameters:
            site_id (str) - The Site ID for which network parameters are requested

        Returns:
            dict or None: Processed Network data in a format
            suitable for Cisco Catalyst Center configuration, or None
            if the response is not a dictionary or there was an error.
        """

        response = self.dnac._exec(
            family="network_settings",
            function='get_network_v2',
            params={"site_id": site_id}
        )
        self.log("Received API response from 'get_network_v2': {0}".format(response), "DEBUG")
        if not isinstance(response, dict):
            self.log("Failed to retrieve the network details - "
                     "Response is not a dictionary", "ERROR")
            return None

        # Extract various network-related details from the response;
        # each record is located by matching its "key" field.
        all_network_details = response.get("response")
        dhcp_details = get_dict_result(all_network_details, "key", "dhcp.server")
        dns_details = get_dict_result(all_network_details, "key", "dns.server")
        snmp_details = get_dict_result(all_network_details, "key", "snmp.trap.receiver")
        syslog_details = get_dict_result(all_network_details, "key", "syslog.server")
        netflow_details = get_dict_result(all_network_details, "key", "netflow.collector")
        ntpserver_details = get_dict_result(all_network_details, "key", "ntp.server")
        timezone_details = get_dict_result(all_network_details, "key", "timezone.site")
        messageoftheday_details = get_dict_result(all_network_details, "key", "device.banner")
        network_aaa = get_dict_result(all_network_details, "key", "aaa.network.server.1")
        network_aaa2 = get_dict_result(all_network_details, "key", "aaa.network.server.2")
        network_aaa_pan = get_dict_result(all_network_details, "key", "aaa.server.pan.network")
        clientAndEndpoint_aaa = get_dict_result(all_network_details, "key", "aaa.endpoint.server.1")
        clientAndEndpoint_aaa2 = get_dict_result(all_network_details,
                                                 "key",
                                                 "aaa.endpoint.server.2")
        clientAndEndpoint_aaa_pan = \
            get_dict_result(all_network_details, "key", "aaa.server.pan.endpoint")

        # Prepare the network details for Cisco Catalyst Center configuration
        # NOTE(review): assumes the snmp, syslog, netflow and timezone records
        # are always present in the response; a missing one would raise
        # AttributeError here - confirm against the get_network_v2 contract.
        network_details = {
            "settings": {
                "snmpServer": {
                    "configureDnacIP": snmp_details.get("value")[0].get("configureDnacIP"),
                    "ipAddresses": snmp_details.get("value")[0].get("ipAddresses"),
                },
                "syslogServer": {
                    "configureDnacIP": syslog_details.get("value")[0].get("configureDnacIP"),
                    "ipAddresses": syslog_details.get("value")[0].get("ipAddresses"),
                },
                "netflowcollector": {
                    "ipAddress": netflow_details.get("value")[0].get("ipAddress"),
                    "port": netflow_details.get("value")[0].get("port")
                },
                "timezone": timezone_details.get("value")[0],
            }
        }
        network_settings = network_details.get("settings")
        # DHCP and NTP lists fall back to [""] when unset, so downstream
        # comparisons always see a list.
        if dhcp_details and dhcp_details.get("value") != []:
            network_settings.update({"dhcpServer": dhcp_details.get("value")})
        else:
            network_settings.update({"dhcpServer": [""]})

        if dns_details is not None:
            network_settings.update({
                "dnsServer": {
                    "domainName": dns_details.get("value")[0].get("domainName"),
                    "primaryIpAddress": dns_details.get("value")[0].get("primaryIpAddress"),
                    "secondaryIpAddress": dns_details.get("value")[0].get("secondaryIpAddress")
                }
            })

        if ntpserver_details and ntpserver_details.get("value") != []:
            network_settings.update({"ntpServer": ntpserver_details.get("value")})
        else:
            network_settings.update({"ntpServer": [""]})

        if messageoftheday_details is not None:
            network_settings.update({
                "messageOfTheday": {
                    "bannerMessage": messageoftheday_details.get("value")[0].get("bannerMessage"),
                }
            })
            # The API reports this flag as a boolean; the module stores it as
            # the strings "true"/"false".
            retain_existing_banner = messageoftheday_details.get("value")[0] \
                .get("retainExistingBanner")
            if retain_existing_banner is True:
                network_settings.get("messageOfTheday").update({
                    "retainExistingBanner": "true"
                })
            else:
                network_settings.get("messageOfTheday").update({
                    "retainExistingBanner": "false"
                })

        if network_aaa and network_aaa_pan:
            # A PAN value of "None" means no ISE PAN is configured, so the
            # secondary AAA server record supplies the ipAddress.
            # NOTE(review): network_aaa2 may be None in that case - confirm the
            # response always carries "aaa.network.server.2" when PAN is "None".
            aaa_pan_value = network_aaa_pan.get("value")[0]
            aaa_value = network_aaa.get("value")[0]
            if aaa_pan_value == "None":
                network_settings.update({
                    "network_aaa": {
                        "network": aaa_value.get("ipAddress"),
                        "protocol": aaa_value.get("protocol"),
                        "ipAddress": network_aaa2.get("value")[0].get("ipAddress"),
                        "servers": "AAA"
                    }
                })
            else:
                network_settings.update({
                    "network_aaa": {
                        "network": aaa_value.get("ipAddress"),
                        "protocol": aaa_value.get("protocol"),
                        "ipAddress": aaa_pan_value,
                        "servers": "ISE"
                    }
                })

        if clientAndEndpoint_aaa and clientAndEndpoint_aaa_pan:
            # Same PAN convention as network_aaa above, applied to the
            # client/endpoint AAA records.
            aaa_pan_value = clientAndEndpoint_aaa_pan.get("value")[0]
            aaa_value = clientAndEndpoint_aaa.get("value")[0]
            if aaa_pan_value == "None":
                network_settings.update({
                    "clientAndEndpoint_aaa": {
                        "network": aaa_value.get("ipAddress"),
                        "protocol": aaa_value.get("protocol"),
                        "ipAddress": clientAndEndpoint_aaa2.get("value")[0].get("ipAddress"),
                        "servers": "AAA"
                    }
                })
            else:
                network_settings.update({
                    "clientAndEndpoint_aaa": {
                        "network": aaa_value.get("ipAddress"),
                        "protocol": aaa_value.get("protocol"),
                        "ipAddress": aaa_pan_value,
                        "servers": "ISE"
                    }
                })

        self.log("Formatted playbook network details: {0}".format(network_details), "DEBUG")
        return network_details
+
+ def global_pool_exists(self, name):
+ """
+ Check if the Global Pool with the given name exists
+
+ Parameters:
+ name (str) - The name of the Global Pool to check for existence
+
+ Returns:
+ dict - A dictionary containing information about the Global Pool's existence:
+ - 'exists' (bool): True if the Global Pool exists, False otherwise.
+ - 'id' (str or None): The ID of the Global Pool if it exists, or None if it doesn't.
+ - 'details' (dict or None): Details of the Global Pool if it exists, else None.
+ """
+
+ global_pool = {
+ "exists": False,
+ "details": None,
+ "id": None
+ }
+ response = self.dnac._exec(
+ family="network_settings",
+ function="get_global_pool",
+ )
+ if not isinstance(response, dict):
+ self.log("Failed to retrieve the global pool details - "
+ "Response is not a dictionary", "CRITICAL")
+ return global_pool
+
+ all_global_pool_details = response.get("response")
+ global_pool_details = get_dict_result(all_global_pool_details, "ipPoolName", name)
+ self.log("Global ip pool name: {0}".format(name), "DEBUG")
+ self.log("Global pool details: {0}".format(global_pool_details), "DEBUG")
+ if not global_pool_details:
+ self.log("Global pool {0} does not exist".format(name), "INFO")
+ return global_pool
+ global_pool.update({"exists": True})
+ global_pool.update({"id": global_pool_details.get("id")})
+ global_pool["details"] = self.get_global_pool_params(global_pool_details)
+
+ self.log("Formatted global pool details: {0}".format(global_pool), "DEBUG")
+ return global_pool
+
+ def reserve_pool_exists(self, name, site_name):
+ """
+ Check if the Reserved pool with the given name exists in a specific site
+ Use check_return_status() to check for failure
+
+ Parameters:
+ name (str) - The name of the Reserved pool to check for existence.
+ site_name (str) - The name of the site where the Reserved pool is located.
+
+ Returns:
+ dict - A dictionary containing information about the Reserved pool's existence:
+ - 'exists' (bool): True if the Reserved pool exists in the specified site, else False.
+ - 'id' (str or None): The ID of the Reserved pool if it exists, or None if it doesn't.
+ - 'details' (dict or None): Details of the Reserved pool if it exists, or else None.
+ """
+
+ reserve_pool = {
+ "exists": False,
+ "details": None,
+ "id": None,
+ "success": True
+ }
+ site_id = self.get_site_id(site_name)
+ self.log("Site ID for the site name {0}: {1}".format(site_name, site_id), "DEBUG")
+ if not site_id:
+ reserve_pool.update({"success": False})
+ self.msg = "Failed to get the site id from the site name {0}".format(site_name)
+ self.status = "failed"
+ return reserve_pool
+
+ response = self.dnac._exec(
+ family="network_settings",
+ function="get_reserve_ip_subpool",
+ params={"siteId": site_id}
+ )
+ if not isinstance(response, dict):
+ reserve_pool.update({"success": False})
+ self.msg = "Error in getting reserve pool - Response is not a dictionary"
+ self.status = "exited"
+ return reserve_pool
+
+ all_reserve_pool_details = response.get("response")
+ reserve_pool_details = get_dict_result(all_reserve_pool_details, "groupName", name)
+ if not reserve_pool_details:
+ self.log("Reserved pool {0} does not exist in the site {1}"
+ .format(name, site_name), "DEBUG")
+ return reserve_pool
+
+ reserve_pool.update({"exists": True})
+ reserve_pool.update({"id": reserve_pool_details.get("id")})
+ reserve_pool.update({"details": self.get_reserve_pool_params(reserve_pool_details)})
+
+ self.log("Reserved pool details: {0}".format(reserve_pool.get("details")), "DEBUG")
+ self.log("Reserved pool id: {0}".format(reserve_pool.get("id")), "DEBUG")
+ return reserve_pool
+
+ def get_have_global_pool(self, config):
+ """
+ Get the current Global Pool information from
+ Cisco Catalyst Center based on the provided playbook details.
+ check this API using check_return_status.
+
+ Parameters:
+ config (dict) - Playbook details containing Global Pool configuration.
+
+ Returns:
+ self - The current object with updated information.
+ """
+
+ global_pool = {
+ "exists": False,
+ "details": None,
+ "id": None
+ }
+ global_pool_settings = config.get("global_pool_details").get("settings")
+ if global_pool_settings is None:
+ self.msg = "settings in global_pool_details is missing in the playbook"
+ self.status = "failed"
+ return self
+
+ global_pool_ippool = global_pool_settings.get("ip_pool")
+ if global_pool_ippool is None:
+ self.msg = "ip_pool in global_pool_details is missing in the playbook"
+ self.status = "failed"
+ return self
+
+ name = global_pool_ippool[0].get("name")
+ if name is None:
+ self.msg = "Mandatory Parameter name required"
+ self.status = "failed"
+ return self
+
+ # If the Global Pool doesn't exist and a previous name is provided
+ # Else try using the previous name
+ global_pool = self.global_pool_exists(name)
+ self.log("Global pool details: {0}".format(global_pool), "DEBUG")
+ prev_name = global_pool_ippool[0].get("prev_name")
+ if global_pool.get("exists") is False and \
+ prev_name is not None:
+ global_pool = self.global_pool_exists(prev_name)
+ if global_pool.get("exists") is False:
+ self.msg = "Prev name {0} doesn't exist in global_pool_details".format(prev_name)
+ self.status = "failed"
+ return self
+
+ self.log("Global pool exists: {0}".format(global_pool.get("exists")), "DEBUG")
+ self.log("Current Site: {0}".format(global_pool.get("details")), "DEBUG")
+ self.have.update({"globalPool": global_pool})
+ self.msg = "Collecting the global pool details from the Cisco Catalyst Center"
+ self.status = "success"
+ return self
+
+ def get_have_reserve_pool(self, config):
+ """
+ Get the current Reserved Pool information from Cisco Catalyst Center
+ based on the provided playbook details.
+ Check this API using check_return_status
+
+ Parameters:
+ config (list of dict) - Playbook details containing Reserved Pool configuration.
+
+ Returns:
+ self - The current object with updated information.
+ """
+
+ reserve_pool = {
+ "exists": False,
+ "details": None,
+ "id": None
+ }
+ reserve_pool_details = config.get("reserve_pool_details")
+ name = reserve_pool_details.get("name")
+ if name is None:
+ self.msg = "Mandatory Parameter name required in reserve_pool_details\n"
+ self.status = "failed"
+ return self
+
+ site_name = reserve_pool_details.get("site_name")
+ self.log("Site Name: {0}".format(site_name), "DEBUG")
+ if site_name is None:
+ self.msg = "Missing parameter 'site_name' in reserve_pool_details"
+ self.status = "failed"
+ return self
+
+ # Check if the Reserved Pool exists in Cisco Catalyst Center
+ # based on the provided name and site name
+ reserve_pool = self.reserve_pool_exists(name, site_name)
+ if not reserve_pool.get("success"):
+ return self.check_return_status()
+ self.log("Reserved pool details: {0}".format(reserve_pool), "DEBUG")
+
+ # If the Reserved Pool doesn't exist and a previous name is provided
+ # Else try using the previous name
+ prev_name = reserve_pool_details.get("prev_name")
+ if reserve_pool.get("exists") is False and \
+ prev_name is not None:
+ reserve_pool = self.reserve_pool_exists(prev_name, site_name)
+ if not reserve_pool.get("success"):
+ return self.check_return_status()
+
+ # If the previous name doesn't exist in Cisco Catalyst Center, return with error
+ if reserve_pool.get("exists") is False:
+ self.msg = "Prev name {0} doesn't exist in reserve_pool_details".format(prev_name)
+ self.status = "failed"
+ return self
+
+ self.log("Reserved pool exists: {0}".format(reserve_pool.get("exists")), "DEBUG")
+ self.log("Reserved pool: {0}".format(reserve_pool.get("details")), "DEBUG")
+
+ # If reserve pool exist, convert ipv6AddressSpace to the required format (boolean)
+ if reserve_pool.get("exists"):
+ reserve_pool_details = reserve_pool.get("details")
+ if reserve_pool_details.get("ipv6AddressSpace") == "False":
+ reserve_pool_details.update({"ipv6AddressSpace": False})
+ else:
+ reserve_pool_details.update({"ipv6AddressSpace": True})
+
+ self.log("Reserved pool details: {0}".format(reserve_pool), "DEBUG")
+ self.have.update({"reservePool": reserve_pool})
+ self.msg = "Collecting the reserve pool details from the Cisco Catalyst Center"
+ self.status = "success"
+ return self
+
+ def get_have_network(self, config):
+ """
+ Get the current Network details from Cisco Catalyst
+ Center based on the provided playbook details.
+
+ Parameters:
+ config (dict) - Playbook details containing Network Management configuration.
+
+ Returns:
+ self - The current object with updated Network information.
+ """
+ network = {}
+ site_name = config.get("network_management_details").get("site_name")
+ if site_name is None:
+ self.msg = "Mandatory Parameter 'site_name' missing"
+ self.status = "failed"
+ return self
+
+ site_id = self.get_site_id(site_name)
+ if site_id is None:
+ self.msg = "Failed to get site id from {0}".format(site_name)
+ self.status = "failed"
+ return self
+
+ network["site_id"] = site_id
+ network["net_details"] = self.get_network_params(site_id)
+ self.log("Network details from the Catalyst Center: {0}".format(network), "DEBUG")
+ self.have.update({"network": network})
+ self.msg = "Collecting the network details from the Cisco Catalyst Center"
+ self.status = "success"
+ return self
+
+ def get_have(self, config):
+ """
+ Get the current Global Pool Reserved Pool and Network details from Cisco Catalyst Center
+
+ Parameters:
+ config (dict) - Playbook details containing Global Pool,
+ Reserved Pool, and Network Management configuration.
+
+ Returns:
+ self - The current object with updated Global Pool,
+ Reserved Pool, and Network information.
+ """
+
+ if config.get("global_pool_details") is not None:
+ self.get_have_global_pool(config).check_return_status()
+
+ if config.get("reserve_pool_details") is not None:
+ self.get_have_reserve_pool(config).check_return_status()
+
+ if config.get("network_management_details") is not None:
+ self.get_have_network(config).check_return_status()
+
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.msg = "Successfully retrieved the details from the Cisco Catalyst Center"
+ self.status = "success"
+ return self
+
+ def get_want_global_pool(self, global_ippool):
+ """
+ Get all the Global Pool information from playbook
+ Set the status and the msg before returning from the API
+ Check the return value of the API with check_return_status()
+
+ Parameters:
+ global_ippool (dict) - Playbook global pool details containing IpAddressSpace,
+ DHCP server IPs, DNS server IPs, IP pool name, IP pool CIDR, gateway, and type.
+
+ Returns:
+ self - The current object with updated desired Global Pool information.
+ """
+
+ # Initialize the desired Global Pool configuration
+ want_global = {
+ "settings": {
+ "ippool": [{
+ "IpAddressSpace": global_ippool.get("ip_address_space"),
+ "dhcpServerIps": global_ippool.get("dhcp_server_ips"),
+ "dnsServerIps": global_ippool.get("dns_server_ips"),
+ "ipPoolName": global_ippool.get("name"),
+ "ipPoolCidr": global_ippool.get("cidr"),
+ "gateway": global_ippool.get("gateway"),
+ "type": global_ippool.get("pool_type"),
+ }]
+ }
+ }
+ want_ippool = want_global.get("settings").get("ippool")[0]
+
+ # Converting to the required format based on the existing Global Pool
+ if not self.have.get("globalPool").get("exists"):
+ if want_ippool.get("dhcpServerIps") is None:
+ want_ippool.update({"dhcpServerIps": []})
+ if want_ippool.get("dnsServerIps") is None:
+ want_ippool.update({"dnsServerIps": []})
+ if want_ippool.get("IpAddressSpace") is None:
+ want_ippool.update({"IpAddressSpace": ""})
+ if want_ippool.get("gateway") is None:
+ want_ippool.update({"gateway": ""})
+ if want_ippool.get("type") is None:
+ want_ippool.update({"type": "Generic"})
+ else:
+ have_ippool = self.have.get("globalPool").get("details") \
+ .get("settings").get("ippool")[0]
+
+ # Copy existing Global Pool information if the desired configuration is not provided
+ want_ippool.update({
+ "IpAddressSpace": have_ippool.get("IpAddressSpace"),
+ "type": have_ippool.get("type"),
+ "ipPoolCidr": have_ippool.get("ipPoolCidr")
+ })
+ want_ippool.update({})
+ want_ippool.update({})
+
+ for key in ["dhcpServerIps", "dnsServerIps", "gateway"]:
+ if want_ippool.get(key) is None and have_ippool.get(key) is not None:
+ want_ippool[key] = have_ippool[key]
+
+ self.log("Global pool playbook details: {0}".format(want_global), "DEBUG")
+ self.want.update({"wantGlobal": want_global})
+ self.msg = "Collecting the global pool details from the playbook"
+ self.status = "success"
+ return self
+
    def get_want_reserve_pool(self, reserve_pool):
        """
        Get all the Reserved Pool information from playbook
        Set the status and the msg before returning from the API
        Check the return value of the API with check_return_status()

        Builds the Catalyst Center payload from the playbook's snake_case
        fields, validates mandatory parameters, fills defaults for a new
        reservation, and strips the keys of the unused address family.

        Parameters:
            reserve_pool (dict) - Playbook reserved pool
            details containing various properties.

        Returns:
            self - The current object with updated desired Reserved Pool information
            (stored under self.want["wantReserve"]).
        """

        # Map playbook (snake_case) fields to the Catalyst Center payload keys.
        want_reserve = {
            "name": reserve_pool.get("name"),
            "type": reserve_pool.get("pool_type"),
            "ipv6AddressSpace": reserve_pool.get("ipv6_address_space"),
            "ipv4GlobalPool": reserve_pool.get("ipv4_global_pool"),
            "ipv4Prefix": reserve_pool.get("ipv4_prefix"),
            "ipv4PrefixLength": reserve_pool.get("ipv4_prefix_length"),
            "ipv4GateWay": reserve_pool.get("ipv4_gateway"),
            "ipv4DhcpServers": reserve_pool.get("ipv4_dhcp_servers"),
            "ipv4DnsServers": reserve_pool.get("ipv4_dns_servers"),
            "ipv4Subnet": reserve_pool.get("ipv4_subnet"),
            "ipv6GlobalPool": reserve_pool.get("ipv6_global_pool"),
            "ipv6Prefix": reserve_pool.get("ipv6_prefix"),
            "ipv6PrefixLength": reserve_pool.get("ipv6_prefix_length"),
            "ipv6GateWay": reserve_pool.get("ipv6_gateway"),
            "ipv6DhcpServers": reserve_pool.get("ipv6_dhcp_servers"),
            "ipv6Subnet": reserve_pool.get("ipv6_subnet"),
            "ipv6DnsServers": reserve_pool.get("ipv6_dns_servers"),
            "ipv4TotalHost": reserve_pool.get("ipv4_total_host"),
            "ipv6TotalHost": reserve_pool.get("ipv6_total_host")
        }

        # Check for missing mandatory parameters in the playbook
        if not want_reserve.get("name"):
            self.msg = "Missing mandatory parameter 'name' in reserve_pool_details"
            self.status = "failed"
            return self

        # Reserving by prefix requires either a subnet or a total host count.
        if want_reserve.get("ipv4Prefix") is True:
            if want_reserve.get("ipv4Subnet") is None and \
                    want_reserve.get("ipv4TotalHost") is None:
                self.msg = "missing parameter 'ipv4_subnet' or 'ipv4TotalHost' \
                    while adding the ipv4 in reserve_pool_details"
                self.status = "failed"
                return self

        if want_reserve.get("ipv6Prefix") is True:
            if want_reserve.get("ipv6Subnet") is None and \
                    want_reserve.get("ipv6TotalHost") is None:
                self.msg = "missing parameter 'ipv6_subnet' or 'ipv6TotalHost' \
                    while adding the ipv6 in reserve_pool_details"
                self.status = "failed"
                return self

        self.log("Reserved IP pool playbook details: {0}".format(want_reserve), "DEBUG")

        # If there are no existing Reserved Pool details, validate and set defaults
        if not self.have.get("reservePool").get("details"):
            if not want_reserve.get("ipv4GlobalPool"):
                self.msg = "missing parameter 'ipv4GlobalPool' in reserve_pool_details"
                self.status = "failed"
                return self

            if not want_reserve.get("ipv4PrefixLength"):
                self.msg = "missing parameter 'ipv4_prefix_length' in reserve_pool_details"
                self.status = "failed"
                return self

        # Fill defaults for fields the playbook left unset; these run whether
        # or not the pool already exists.
        if want_reserve.get("type") is None:
            want_reserve.update({"type": "Generic"})
        if want_reserve.get("ipv4GateWay") is None:
            want_reserve.update({"ipv4GateWay": ""})
        if want_reserve.get("ipv4DhcpServers") is None:
            want_reserve.update({"ipv4DhcpServers": []})
        if want_reserve.get("ipv4DnsServers") is None:
            want_reserve.update({"ipv4DnsServers": []})
        if want_reserve.get("ipv6AddressSpace") is None:
            want_reserve.update({"ipv6AddressSpace": False})
        if want_reserve.get("slaacSupport") is None:
            want_reserve.update({"slaacSupport": True})
        if want_reserve.get("ipv4TotalHost") is None:
            del want_reserve['ipv4TotalHost']
        # Requesting an IPv6 address space implies an IPv6 prefix reservation;
        # otherwise the (possibly None) ipv6Prefix key is dropped.
        if want_reserve.get("ipv6AddressSpace") is True:
            want_reserve.update({"ipv6Prefix": True})
        else:
            del want_reserve['ipv6Prefix']

        # Strip the keys of the address family that is not being reserved so
        # the payload only carries the relevant fields.
        if not want_reserve.get("ipv6AddressSpace"):
            keys_to_check = ['ipv6GlobalPool', 'ipv6PrefixLength',
                             'ipv6GateWay', 'ipv6DhcpServers',
                             'ipv6DnsServers', 'ipv6TotalHost']
            for key in keys_to_check:
                if want_reserve.get(key) is None:
                    del want_reserve[key]
        else:
            keys_to_delete = ['type', 'ipv4GlobalPool',
                              'ipv4Prefix', 'ipv4PrefixLength',
                              'ipv4TotalHost', 'ipv4Subnet']
            for key in keys_to_delete:
                if key in want_reserve:
                    del want_reserve[key]

        self.want.update({"wantReserve": want_reserve})
        self.log("Desired State (want): {0}".format(self.want), "INFO")
        self.msg = "Collecting the reserve pool details from the playbook"
        self.status = "success"
        return self
+
+ def get_want_network(self, network_management_details):
+ """
+ Get all the Network related information from playbook
+ Set the status and the msg before returning from the API
+ Check the return value of the API with check_return_status()
+
+ Parameters:
+ network_management_details (dict) - Playbook network
+ details containing various network settings.
+
+ Returns:
+ self - The current object with updated desired Network-related information.
+ """
+
+ want_network = {
+ "settings": {
+ "dhcpServer": {},
+ "dnsServer": {},
+ "snmpServer": {},
+ "syslogServer": {},
+ "netflowcollector": {},
+ "ntpServer": {},
+ "timezone": "",
+ "messageOfTheday": {},
+ "network_aaa": {},
+ "clientAndEndpoint_aaa": {}
+ }
+ }
+ want_network_settings = want_network.get("settings")
+ self.log("Current state (have): {0}".format(self.have), "DEBUG")
+ if network_management_details.get("dhcp_server") is not None:
+ want_network_settings.update({
+ "dhcpServer": network_management_details.get("dhcp_server")
+ })
+ else:
+ del want_network_settings["dhcpServer"]
+
+ if network_management_details.get("ntp_server") is not None:
+ want_network_settings.update({
+ "ntpServer": network_management_details.get("ntp_server")
+ })
+ else:
+ del want_network_settings["ntpServer"]
+
+ if network_management_details.get("timezone") is not None:
+ want_network_settings["timezone"] = \
+ network_management_details.get("timezone")
+ else:
+ self.msg = "missing parameter timezone in network"
+ self.status = "failed"
+ return self
+
+ dnsServer = network_management_details.get("dns_server")
+ if dnsServer is not None:
+ if dnsServer.get("domain_name") is not None:
+ want_network_settings.get("dnsServer").update({
+ "domainName":
+ dnsServer.get("domain_name")
+ })
+
+ if dnsServer.get("primary_ip_address") is not None:
+ want_network_settings.get("dnsServer").update({
+ "primaryIpAddress":
+ dnsServer.get("primary_ip_address")
+ })
+
+ if dnsServer.get("secondary_ip_address") is not None:
+ want_network_settings.get("dnsServer").update({
+ "secondaryIpAddress":
+ dnsServer.get("secondary_ip_address")
+ })
+ else:
+ del want_network_settings["dnsServer"]
+
+ snmpServer = network_management_details.get("snmp_server")
+ if snmpServer is not None:
+ if snmpServer.get("configure_dnac_ip") is not None:
+ want_network_settings.get("snmpServer").update({
+ "configureDnacIP": snmpServer.get("configure_dnac_ip")
+ })
+ if snmpServer.get("ip_addresses") is not None:
+ want_network_settings.get("snmpServer").update({
+ "ipAddresses": snmpServer.get("ip_addresses")
+ })
+ else:
+ del want_network_settings["snmpServer"]
+
+ syslogServer = network_management_details.get("syslog_server")
+ if syslogServer is not None:
+ if syslogServer.get("configure_dnac_ip") is not None:
+ want_network_settings.get("syslogServer").update({
+ "configureDnacIP": syslogServer.get("configure_dnac_ip")
+ })
+ if syslogServer.get("ip_addresses") is not None:
+ want_network_settings.get("syslogServer").update({
+ "ipAddresses": syslogServer.get("ip_addresses")
+ })
+ else:
+ del want_network_settings["syslogServer"]
+
+ netflowcollector = network_management_details.get("netflow_collector")
+ if netflowcollector is not None:
+ if netflowcollector.get("ip_address") is not None:
+ want_network_settings.get("netflowcollector").update({
+ "ipAddress":
+ netflowcollector.get("ip_address")
+ })
+ if netflowcollector.get("port") is not None:
+ want_network_settings.get("netflowcollector").update({
+ "port":
+ netflowcollector.get("port")
+ })
+ else:
+ del want_network_settings["netflowcollector"]
+
+ messageOfTheday = network_management_details.get("message_of_the_day")
+ if messageOfTheday is not None:
+ if messageOfTheday.get("banner_message") is not None:
+ want_network_settings.get("messageOfTheday").update({
+ "bannerMessage":
+ messageOfTheday.get("banner_message")
+ })
+ if messageOfTheday.get("retain_existing_banner") is not None:
+ want_network_settings.get("messageOfTheday").update({
+ "retainExistingBanner":
+ messageOfTheday.get("retain_existing_banner")
+ })
+ else:
+ del want_network_settings["messageOfTheday"]
+
+ network_aaa = network_management_details.get("network_aaa")
+ if network_aaa:
+ if network_aaa.get("ip_address"):
+ want_network_settings.get("network_aaa").update({
+ "ipAddress":
+ network_aaa.get("ip_address")
+ })
+ else:
+ if network_aaa.get("servers") == "ISE":
+ self.msg = "missing parameter ip_address in network_aaa, server ISE is set"
+ self.status = "failed"
+ return self
+
+ if network_aaa.get("network"):
+ want_network_settings.get("network_aaa").update({
+ "network": network_aaa.get("network")
+ })
+ else:
+ self.msg = "missing parameter network in network_aaa"
+ self.status = "failed"
+ return self
+
+ if network_aaa.get("protocol"):
+ want_network_settings.get("network_aaa").update({
+ "protocol":
+ network_aaa.get("protocol")
+ })
+ else:
+ self.msg = "missing parameter protocol in network_aaa"
+ self.status = "failed"
+ return self
+
+ if network_aaa.get("servers"):
+ want_network_settings.get("network_aaa").update({
+ "servers":
+ network_aaa.get("servers")
+ })
+ else:
+ self.msg = "missing parameter servers in network_aaa"
+ self.status = "failed"
+ return self
+
+ if network_aaa.get("shared_secret"):
+ want_network_settings.get("network_aaa").update({
+ "sharedSecret":
+ network_aaa.get("shared_secret")
+ })
+ else:
+ del want_network_settings["network_aaa"]
+
+ clientAndEndpoint_aaa = network_management_details.get("client_and_endpoint_aaa")
+ if clientAndEndpoint_aaa:
+ if clientAndEndpoint_aaa.get("ip_address"):
+ want_network_settings.get("clientAndEndpoint_aaa").update({
+ "ipAddress":
+ clientAndEndpoint_aaa.get("ip_address")
+ })
+ else:
+ if clientAndEndpoint_aaa.get("servers") == "ISE":
+ self.msg = "missing parameter ip_address in clientAndEndpoint_aaa, \
+ server ISE is set"
+ self.status = "failed"
+ return self
+
+ if clientAndEndpoint_aaa.get("network"):
+ want_network_settings.get("clientAndEndpoint_aaa").update({
+ "network":
+ clientAndEndpoint_aaa.get("network")
+ })
+ else:
+ self.msg = "missing parameter network in clientAndEndpoint_aaa"
+ self.status = "failed"
+ return self
+
+ if clientAndEndpoint_aaa.get("protocol"):
+ want_network_settings.get("clientAndEndpoint_aaa").update({
+ "protocol":
+ clientAndEndpoint_aaa.get("protocol")
+ })
+ else:
+ self.msg = "missing parameter protocol in clientAndEndpoint_aaa"
+ self.status = "failed"
+ return self
+
+ if clientAndEndpoint_aaa.get("servers"):
+ want_network_settings.get("clientAndEndpoint_aaa").update({
+ "servers":
+ clientAndEndpoint_aaa.get("servers")
+ })
+ else:
+ self.msg = "missing parameter servers in clientAndEndpoint_aaa"
+ self.status = "failed"
+ return self
+
+ if clientAndEndpoint_aaa.get("shared_secret"):
+ want_network_settings.get("clientAndEndpoint_aaa").update({
+ "sharedSecret":
+ clientAndEndpoint_aaa.get("shared_secret")
+ })
+ else:
+ del want_network_settings["clientAndEndpoint_aaa"]
+
+ self.log("Network playbook details: {0}".format(want_network), "DEBUG")
+ self.want.update({"wantNetwork": want_network})
+ self.msg = "Collecting the network details from the playbook"
+ self.status = "success"
+ return self
+
+ def get_want(self, config):
+ """
+ Get all the Global Pool Reserved Pool and Network related information from playbook
+
+ Parameters:
+ config (list of dict) - Playbook details
+
+ Returns:
+ None
+ """
+
+ if config.get("global_pool_details"):
+ global_ippool = config.get("global_pool_details").get("settings").get("ip_pool")[0]
+ self.get_want_global_pool(global_ippool).check_return_status()
+
+ if config.get("reserve_pool_details"):
+ reserve_pool = config.get("reserve_pool_details")
+ self.get_want_reserve_pool(reserve_pool).check_return_status()
+
+ if config.get("network_management_details"):
+ network_management_details = config.get("network_management_details") \
+ .get("settings")
+ self.get_want_network(network_management_details).check_return_status()
+
+ self.log("Desired State (want): {0}".format(self.want), "INFO")
+ self.msg = "Successfully retrieved details from the playbook"
+ self.status = "success"
+ return self
+
    def update_global_pool(self, config):
        """
        Update/Create Global Pool in Cisco Catalyst Center with fields provided in playbook

        Creates the pool when it does not exist yet; otherwise updates it only
        when self.requires_update() reports a difference. Outcomes are recorded
        under self.result["response"][0]["globalPool"].

        Parameters:
            config (list of dict) - Playbook details

        Returns:
            None
        """

        name = config.get("global_pool_details") \
            .get("settings").get("ip_pool")[0].get("name")
        result_global_pool = self.result.get("response")[0].get("globalPool")
        result_global_pool.get("response").update({name: {}})

        # Check pool exist, if not create and return
        if not self.have.get("globalPool").get("exists"):
            pool_params = self.want.get("wantGlobal")
            self.log("Desired State for global pool (want): {0}".format(pool_params), "DEBUG")
            response = self.dnac._exec(
                family="network_settings",
                function="create_global_pool",
                params=pool_params,
            )
            # create_global_pool is asynchronous; wait for execution to finish
            self.check_execution_response_status(response).check_return_status()
            self.log("Successfully created global pool '{0}'.".format(name), "INFO")
            result_global_pool.get("response").get(name) \
                .update({"globalPool Details": self.want.get("wantGlobal")})
            result_global_pool.get("msg").update({name: "Global Pool Created Successfully"})
            return

        # Pool exists, check update is required
        if not self.requires_update(self.have.get("globalPool").get("details"),
                                    self.want.get("wantGlobal"), self.global_pool_obj_params):
            self.log("Global pool '{0}' doesn't require an update".format(name), "INFO")
            result_global_pool.get("response").get(name).update({
                "Cisco Catalyst Center params":
                self.have.get("globalPool").get("details").get("settings").get("ippool")[0]
            })
            result_global_pool.get("response").get(name).update({
                "Id": self.have.get("globalPool").get("id")
            })
            result_global_pool.get("msg").update({
                name: "Global pool doesn't require an update"
            })
            return

        self.log("Global pool requires update", "DEBUG")
        # Pool Exists
        # Deep-copy so the id injection and key removals below do not mutate self.want
        pool_params = copy.deepcopy(self.want.get("wantGlobal"))
        pool_params_ippool = pool_params.get("settings").get("ippool")[0]
        pool_params_ippool.update({"id": self.have.get("globalPool").get("id")})
        self.log("Desired State for global pool (want): {0}".format(pool_params), "DEBUG")
        # These fields are immutable on an existing pool and are stripped
        # before calling update_global_pool
        keys_to_remove = ["IpAddressSpace", "ipPoolCidr", "type"]
        for key in keys_to_remove:
            del pool_params["settings"]["ippool"][0][key]

        # Backfill values the playbook omitted with the current ones so the
        # update call does not blank them out
        have_ippool = self.have.get("globalPool").get("details").get("settings").get("ippool")[0]
        keys_to_update = ["dhcpServerIps", "dnsServerIps", "gateway"]
        for key in keys_to_update:
            if pool_params_ippool.get(key) is None:
                pool_params_ippool[key] = have_ippool.get(key)

        self.log("Desired global pool details (want): {0}".format(pool_params), "DEBUG")
        response = self.dnac._exec(
            family="network_settings",
            function="update_global_pool",
            params=pool_params,
        )

        self.check_execution_response_status(response).check_return_status()
        self.log("Global pool '{0}' updated successfully".format(name), "INFO")
        # NOTE(review): the no-update branch above reads the id from
        # self.have["globalPool"]["id"], but this line reads it from
        # self.have["globalPool"]["details"]["id"] — confirm both locations
        # actually carry the id, otherwise this reports "Id": None.
        result_global_pool.get("response").get(name) \
            .update({"Id": self.have.get("globalPool").get("details").get("id")})
        result_global_pool.get("msg").update({name: "Global Pool Updated Successfully"})
        return
+
    def update_reserve_pool(self, config):
        """
        Update or Create a Reserve Pool in Cisco Catalyst Center based on the provided configuration.
        This method checks if a reserve pool with the specified name exists in Cisco Catalyst Center.
        If it exists and requires an update, it updates the pool. If not, it creates a new pool.

        Parameters:
            config (list of dict) - Playbook details containing Reserve Pool information.

        Returns:
            None
        """

        name = config.get("reserve_pool_details").get("name")
        result_reserve_pool = self.result.get("response")[1].get("reservePool")
        result_reserve_pool.get("response").update({name: {}})
        self.log("Current reserved pool details in Catalyst Center: {0}"
                 .format(self.have.get("reservePool").get("details")), "DEBUG")
        self.log("Desired reserved pool details in Catalyst Center: {0}"
                 .format(self.want.get("wantReserve")), "DEBUG")

        # Check pool exist, if not create and return
        self.log("IPv4 global pool: {0}"
                 .format(self.want.get("wantReserve").get("ipv4GlobalPool")), "DEBUG")
        site_name = config.get("reserve_pool_details").get("site_name")
        # NOTE(review): reserve_params aliases self.want["wantReserve"] (no
        # deepcopy, unlike update_global_pool), so the "site_id" and later
        # "id" keys written below also land in self.want — confirm this is
        # intentional and does not skew later want-vs-have comparisons.
        reserve_params = self.want.get("wantReserve")
        site_id = self.get_site_id(site_name)
        reserve_params.update({"site_id": site_id})
        if not self.have.get("reservePool").get("exists"):
            self.log("Desired reserved pool details (want): {0}".format(reserve_params), "DEBUG")
            response = self.dnac._exec(
                family="network_settings",
                function="reserve_ip_subpool",
                params=reserve_params,
            )
            # reserve_ip_subpool is asynchronous; wait for execution to finish
            self.check_execution_response_status(response).check_return_status()
            self.log("Successfully created IP subpool reservation '{0}'.".format(name), "INFO")
            result_reserve_pool.get("response").get(name) \
                .update({"reservePool Details": self.want.get("wantReserve")})
            result_reserve_pool.get("msg") \
                .update({name: "Ip Subpool Reservation Created Successfully"})
            return

        # Check update is required
        if not self.requires_update(self.have.get("reservePool").get("details"),
                                    self.want.get("wantReserve"), self.reserve_pool_obj_params):
            self.log("Reserved ip subpool '{0}' doesn't require an update".format(name), "INFO")
            result_reserve_pool.get("response").get(name) \
                .update({"Cisco Catalyst Center params": self.have.get("reservePool").get("details")})
            result_reserve_pool.get("response").get(name) \
                .update({"Id": self.have.get("reservePool").get("id")})
            result_reserve_pool.get("msg") \
                .update({name: "Reserve ip subpool doesn't require an update"})
            return

        self.log("Reserved ip pool '{0}' requires an update".format(name), "DEBUG")
        # Pool Exists
        self.log("Current reserved ip pool '{0}' details in Catalyst Center: {1}"
                 .format(name, self.have.get("reservePool")), "DEBUG")
        self.log("Desired reserved ip pool '{0}' details: {1}"
                 .format(name, self.want.get("wantReserve")), "DEBUG")
        # Inject the existing reservation id so the API updates in place
        reserve_params.update({"id": self.have.get("reservePool").get("id")})
        response = self.dnac._exec(
            family="network_settings",
            function="update_reserve_ip_subpool",
            params=reserve_params,
        )
        self.check_execution_response_status(response).check_return_status()
        self.log("Reserved ip subpool '{0}' updated successfully.".format(name), "INFO")
        result_reserve_pool['msg'] = "Reserved Ip Subpool Updated Successfully"
        result_reserve_pool.get("response").get(name) \
            .update({"Reservation details": self.have.get("reservePool").get("details")})
        return
+
+ def update_network(self, config):
+ """
+ Update or create a network configuration in Cisco Catalyst
+ Center based on the provided playbook details.
+
+ Parameters:
+ config (list of dict) - Playbook details containing Network Management information.
+
+ Returns:
+ None
+ """
+
+ site_name = config.get("network_management_details").get("site_name")
+ result_network = self.result.get("response")[2].get("network")
+ result_network.get("response").update({site_name: {}})
+
+ # Check update is required or not
+ if not self.requires_update(self.have.get("network").get("net_details"),
+ self.want.get("wantNetwork"), self.network_obj_params):
+
+ self.log("Network in site '{0}' doesn't require an update.".format(site_name), "INFO")
+ result_network.get("response").get(site_name).update({
+ "Cisco Catalyst Center params": self.have.get("network")
+ .get("net_details").get("settings")
+ })
+ result_network.get("msg").update({site_name: "Network doesn't require an update"})
+ return
+
+ self.log("Network in site '{0}' requires update.".format(site_name), "INFO")
+ self.log("Current State of network in Catalyst Center: {0}"
+ .format(self.have.get("network")), "DEBUG")
+ self.log("Desired State of network: {0}".format(self.want.get("wantNetwork")), "DEBUG")
+
+ net_params = copy.deepcopy(self.want.get("wantNetwork"))
+ net_params.update({"site_id": self.have.get("network").get("site_id")})
+ response = self.dnac._exec(
+ family="network_settings",
+ function='update_network_v2',
+ params=net_params,
+ )
+ self.log("Received API response of 'update_network_v2': {0}".format(response), "DEBUG")
+ validation_string = "desired common settings operation successful"
+ self.check_task_response_status(response, validation_string).check_return_status()
+ self.log("Network has been changed successfully", "INFO")
+ result_network.get("msg") \
+ .update({site_name: "Network Updated successfully"})
+ result_network.get("response").get(site_name) \
+ .update({"Network Details": self.want.get("wantNetwork").get("settings")})
+ return
+
+ def get_diff_merged(self, config):
+ """
+ Update or create Global Pool, Reserve Pool, and
+ Network configurations in Cisco Catalyst Center based on the playbook details
+
+ Parameters:
+ config (list of dict) - Playbook details containing
+ Global Pool, Reserve Pool, and Network Management information.
+
+ Returns:
+ self
+ """
+
+ if config.get("global_pool_details") is not None:
+ self.update_global_pool(config)
+
+ if config.get("reserve_pool_details") is not None:
+ self.update_reserve_pool(config)
+
+ if config.get("network_management_details") is not None:
+ self.update_network(config)
+
+ return self
+
+ def delete_reserve_pool(self, name):
+ """
+ Delete a Reserve Pool by name in Cisco Catalyst Center
+
+ Parameters:
+ name (str) - The name of the Reserve Pool to be deleted.
+
+ Returns:
+ self
+ """
+
+ reserve_pool_exists = self.have.get("reservePool").get("exists")
+ result_reserve_pool = self.result.get("response")[1].get("reservePool")
+
+ if not reserve_pool_exists:
+ result_reserve_pool.get("response").update({name: "Reserve Pool not found"})
+ self.msg = "Reserved Ip Subpool Not Found"
+ self.status = "success"
+ return self
+
+ self.log("Reserved IP pool scheduled for deletion: {0}"
+ .format(self.have.get("reservePool").get("name")), "INFO")
+ _id = self.have.get("reservePool").get("id")
+ self.log("Reserved pool {0} id: {1}".format(name, _id), "DEBUG")
+ response = self.dnac._exec(
+ family="network_settings",
+ function="release_reserve_ip_subpool",
+ params={"id": _id},
+ )
+ self.check_execution_response_status(response).check_return_status()
+ executionid = response.get("executionId")
+ result_reserve_pool = self.result.get("response")[1].get("reservePool")
+ result_reserve_pool.get("response").update({name: {}})
+ result_reserve_pool.get("response").get(name) \
+ .update({"Execution Id": executionid})
+ result_reserve_pool.get("msg") \
+ .update({name: "Ip subpool reservation released successfully"})
+ self.msg = "Reserved pool - {0} released successfully".format(name)
+ self.status = "success"
+ return self
+
+ def delete_global_pool(self, name):
+ """
+ Delete a Global Pool by name in Cisco Catalyst Center
+
+ Parameters:
+ name (str) - The name of the Global Pool to be deleted.
+
+ Returns:
+ self
+ """
+
+ global_pool_exists = self.have.get("globalPool").get("exists")
+ result_global_pool = self.result.get("response")[0].get("globalPool")
+ if not global_pool_exists:
+ result_global_pool.get("response").update({name: "Global Pool not found"})
+ self.msg = "Global pool Not Found"
+ self.status = "success"
+ return self
+
+ response = self.dnac._exec(
+ family="network_settings",
+ function="delete_global_ip_pool",
+ params={"id": self.have.get("globalPool").get("id")},
+ )
+
+ # Check the execution status
+ self.check_execution_response_status(response).check_return_status()
+ executionid = response.get("executionId")
+
+ # Update result information
+ result_global_pool = self.result.get("response")[0].get("globalPool")
+ result_global_pool.get("response").update({name: {}})
+ result_global_pool.get("response").get(name).update({"Execution Id": executionid})
+ result_global_pool.get("msg").update({name: "Pool deleted successfully"})
+ self.msg = "Global pool - {0} deleted successfully".format(name)
+ self.status = "success"
+ return self
+
+ def get_diff_deleted(self, config):
+ """
+ Delete Reserve Pool and Global Pool in Cisco Catalyst Center based on playbook details.
+
+ Parameters:
+ config (list of dict) - Playbook details
+
+ Returns:
+ self
+ """
+
+ if config.get("reserve_pool_details") is not None:
+ name = config.get("reserve_pool_details").get("name")
+ self.delete_reserve_pool(name).check_return_status()
+
+ if config.get("global_pool_details") is not None:
+ name = config.get("global_pool_details") \
+ .get("settings").get("ip_pool")[0].get("name")
+ self.delete_global_pool(name).check_return_status()
+
+ return self
+
+ def verify_diff_merged(self, config):
+ """
+ Validating the Cisco Catalyst Center configuration with the playbook details
+ when state is merged (Create/Update).
+
+ Parameters:
+ config (dict) - Playbook details containing Global Pool,
+ Reserved Pool, and Network Management configuration.
+
+ Returns:
+ self
+ """
+
+ self.get_have(config)
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.log("Requested State (want): {0}".format(self.want), "INFO")
+ if config.get("global_pool_details") is not None:
+ self.log("Desired State of global pool (want): {0}"
+ .format(self.want.get("wantGlobal")), "DEBUG")
+ self.log("Current State of global pool (have): {0}"
+ .format(self.have.get("globalPool").get("details")), "DEBUG")
+ if self.requires_update(self.have.get("globalPool").get("details"),
+ self.want.get("wantGlobal"), self.global_pool_obj_params):
+ self.msg = "Global Pool Config is not applied to the Cisco Catalyst Center"
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated global pool '{0}'.".format(self.want
+ .get("wantGlobal").get("settings").get("ippool")[0].get("ipPoolName")), "INFO")
+ self.result.get("response")[0].get("globalPool").update({"Validation": "Success"})
+
+ if config.get("reserve_pool_details") is not None:
+ if self.requires_update(self.have.get("reservePool").get("details"),
+ self.want.get("wantReserve"), self.reserve_pool_obj_params):
+ self.log("Desired State for reserve pool (want): {0}"
+ .format(self.want.get("wantReserve")), "DEBUG")
+ self.log("Current State for reserve pool (have): {0}"
+ .format(self.have.get("reservePool").get("details")), "DEBUG")
+ self.msg = "Reserved Pool Config is not applied to the Cisco Catalyst Center"
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated the reserved pool '{0}'."
+ .format(self.want.get("wantReserve").get("name")), "INFO")
+ self.result.get("response")[1].get("reservePool").update({"Validation": "Success"})
+
+ if config.get("network_management_details") is not None:
+ if self.requires_update(self.have.get("network").get("net_details"),
+ self.want.get("wantNetwork"), self.network_obj_params):
+ self.msg = "Network Functions Config is not applied to the Cisco Catalyst Center"
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated the network functions '{0}'."
+ .format(config.get("network_management_details").get("site_name")), "INFO")
+ self.result.get("response")[2].get("network").update({"Validation": "Success"})
+
+ self.msg = "Successfully validated the Global Pool, Reserve Pool \
+ and the Network Functions."
+ self.status = "success"
+ return self
+
+ def verify_diff_deleted(self, config):
+ """
+ Validating the Cisco Catalyst Center configuration with the playbook details
+ when state is deleted (delete).
+
+ Parameters:
+ config (dict) - Playbook details containing Global Pool,
+ Reserved Pool, and Network Management configuration.
+
+ Returns:
+ self
+ """
+
+ self.get_have(config)
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.log("Desired State (want): {0}".format(self.want), "INFO")
+ if config.get("global_pool_details") is not None:
+ global_pool_exists = self.have.get("globalPool").get("exists")
+ if global_pool_exists:
+ self.msg = "Global Pool Config is not applied to the Cisco Catalyst Center"
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated absence of Global Pool '{0}'."
+ .format(config.get("global_pool_details")
+ .get("settings").get("ip_pool")[0].get("name")), "INFO")
+ self.result.get("response")[0].get("globalPool").update({"Validation": "Success"})
+
+ if config.get("reserve_pool_details") is not None:
+ reserve_pool_exists = self.have.get("reservePool").get("exists")
+ if reserve_pool_exists:
+ self.msg = "Reserved Pool Config is not applied to the Catalyst Center"
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated the absence of Reserve Pool '{0}'."
+ .format(config.get("reserve_pool_details").get("name")), "INFO")
+ self.result.get("response")[1].get("reservePool").update({"Validation": "Success"})
+
+ self.msg = "Successfully validated the absence of Global Pool/Reserve Pool"
+ self.status = "success"
+ return self
+
+ def reset_values(self):
+ """
+ Reset all neccessary attributes to default values
+
+ Parameters:
+ None
+
+ Returns:
+ None
+ """
+
+ self.have.clear()
+ self.want.clear()
+ return
+
+
def main():
    """main entry point for module execution"""

    # Specification of the arguments accepted by this module
    element_spec = {
        "dnac_host": {"type": 'str', "required": True},
        "dnac_port": {"type": 'str', "default": '443'},
        "dnac_username": {"type": 'str', "default": 'admin', "aliases": ['user']},
        "dnac_password": {"type": 'str', "no_log": True},
        "dnac_verify": {"type": 'bool', "default": 'True'},
        "dnac_version": {"type": 'str', "default": '2.2.3.3'},
        "dnac_debug": {"type": 'bool', "default": False},
        "dnac_log": {"type": 'bool', "default": False},
        "dnac_log_level": {"type": 'str', "default": 'WARNING'},
        "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
        "dnac_log_append": {"type": 'bool', "default": True},
        "config_verify": {"type": 'bool', "default": False},
        "dnac_api_task_timeout": {"type": 'int', "default": 1200},
        "dnac_task_poll_interval": {"type": 'int', "default": 2},
        "config": {"type": 'list', "required": True, "elements": 'dict'},
        "state": {"default": 'merged', "choices": ['merged', 'deleted']},
        "validate_response_schema": {"type": 'bool', "default": True},
    }

    module = AnsibleModule(argument_spec=element_spec, supports_check_mode=False)
    ccc_network = NetworkSettings(module)
    requested_state = ccc_network.params.get("state")
    config_verify = ccc_network.params.get("config_verify")

    # Reject unsupported states before doing any work
    if requested_state not in ccc_network.supported_states:
        ccc_network.status = "invalid"
        ccc_network.msg = "State {0} is invalid".format(requested_state)
        ccc_network.check_return_status()

    ccc_network.validate_input().check_return_status()

    # Process (and optionally verify) each playbook item independently
    for config_item in ccc_network.config:
        ccc_network.reset_values()
        ccc_network.get_have(config_item).check_return_status()
        if requested_state != "deleted":
            ccc_network.get_want(config_item).check_return_status()
        ccc_network.get_diff_state_apply[requested_state](config_item).check_return_status()
        if config_verify:
            ccc_network.verify_diff_state_apply[requested_state](config_item).check_return_status()

    module.exit_json(**ccc_network.result)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_intent.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_intent.py
index 02c89721a..3c71046a9 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_intent.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_intent.py
@@ -1,439 +1,247 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright (c) 2022, Cisco Systems
+# Copyright (c) 2024, Cisco Systems
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
-
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-__author__ = ("Madhan Sankaranarayanan, Rishita Chowdhary")
+__author__ = ("Abinash Mishra, Madhan Sankaranarayanan, Rishita Chowdhary")
DOCUMENTATION = r"""
---
module: pnp_intent
short_description: Resource module for Site and PnP related functions
description:
-- Manage operations add device, claim device and unclaim device of Onboarding Configuration(PnP) resource
-- API to add device to pnp inventory and claim it to a site.
-- API to delete device from the pnp inventory.
-version_added: '6.6.0'
+ - Manage operations add device, claim device and unclaim device of Onboarding
+ Configuration(PnP) resource
+ - API to add device to pnp inventory and claim it to a site.
+ - API to delete device from the pnp inventory.
+ - API to reset the device from errored state.
+version_added: 6.6.0
extends_documentation_fragment:
- cisco.dnac.intent_params
-author: Madhan Sankaranarayanan (@madhansansel)
- Rishita Chowdhary (@rishitachowdhary)
+author: Abinash Mishra (@abimishr) Madhan Sankaranarayanan (@madhansansel)
+ Rishita Chowdhary (@rishitachowdhary)
options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after
+ applying the playbook config.
+ type: bool
+ default: false
state:
- description: The state of DNAC after module completion.
+ description: The state of Cisco Catalyst Center after module completion.
type: str
- choices: [ merged, deleted ]
+ choices:
+ - merged
+ - deleted
default: merged
config:
description:
- - List of details of device being managed.
+ - List of details of device being managed.
type: list
elements: dict
required: true
suboptions:
- template_name:
- description: Name of template to be configured on the device.
- type: str
- image_name:
- description: Name of image to be configured on the device
- type: str
- golden_image:
- description: Is the image to be condifgured tagged as golden image
- type: bool
- site_name:
- description: Name of the site for which device will be claimed.
- type: str
- deviceInfo:
- description: Pnp Device's deviceInfo.
- type: dict
+ device_info:
+ description:
+ - Provides the device-specific information required for adding devices
+ to the PnP database that are not already present.
+ - For adding a single device, the list should contain exactly one set
+ of device information. If a site name is also provided, the device
+ can be claimed immediately after being added.
+ - For bulk import, the list must contain information for more than one
+ device. Bulk import is intended solely for adding devices; claiming
+ must be performed with separate tasks or configurations.
+ type: list
+ required: true
+ elements: dict
suboptions:
- aaaCredentials:
- description: Pnp Device's aaaCredentials.
- type: dict
- suboptions:
- password:
- description: Pnp Device's password.
- type: str
- username:
- description: Pnp Device's username.
- type: str
- addedOn:
- description: Pnp Device's addedOn.
- type: int
- addnMacAddrs:
- description: Pnp Device's addnMacAddrs.
- elements: str
- type: list
- agentType:
- description: Pnp Device's agentType.
- type: str
- authStatus:
- description: Pnp Device's authStatus.
- type: str
- authenticatedSudiSerialNo:
- description: Pnp Device's authenticatedSudiSerialNo.
- type: str
- capabilitiesSupported:
- description: Pnp Device's capabilitiesSupported.
- elements: str
- type: list
- cmState:
- description: Pnp Device's cmState.
- type: str
- description:
- description: Pnp Device's description.
- type: str
- deviceSudiSerialNos:
- description: Pnp Device's deviceSudiSerialNos.
- elements: str
- type: list
- deviceType:
- description: Pnp Device's deviceType.
- type: str
- featuresSupported:
- description: Pnp Device's featuresSupported.
- elements: str
- type: list
- fileSystemList:
- description: Pnp Device's fileSystemList.
- type: list
- elements: dict
- suboptions:
- freespace:
- description: Pnp Device's freespace.
- type: int
- name:
- description: Pnp Device's name.
- type: str
- readable:
- description: Readable flag.
- type: bool
- size:
- description: Pnp Device's size.
- type: int
- type:
- description: Pnp Device's type.
- type: str
- writeable:
- description: Writeable flag.
- type: bool
- firstContact:
- description: Pnp Device's firstContact.
- type: int
hostname:
- description: Pnp Device's hostname.
+ description:
+ - Defines the desired hostname for the PnP device after it has
+ been claimed.
+ - The hostname can only be assigned or changed during the claim
+ process, not during bulk or single device additions.
type: str
- httpHeaders:
- description: Pnp Device's httpHeaders.
- type: list
- elements: dict
- suboptions:
- key:
- description: Pnp Device's key.
- type: str
- value:
- description: Pnp Device's value.
- type: str
- imageFile:
- description: Pnp Device's imageFile.
- type: str
- imageVersion:
- description: Pnp Device's imageVersion.
- type: str
- ipInterfaces:
- description: Pnp Device's ipInterfaces.
- elements: dict
- type: list
- suboptions:
- ipv4Address:
- description: Pnp Device's ipv4Address.
- type: dict
- ipv6AddressList:
- description: Pnp Device's ipv6AddressList.
- elements: dict
- type: list
- macAddress:
- description: Pnp Device's macAddress.
- type: str
- name:
- description: Pnp Device's name.
- type: str
- status:
- description: Pnp Device's status.
- type: str
- lastContact:
- description: Pnp Device's lastContact.
- type: int
- lastSyncTime:
- description: Pnp Device's lastSyncTime.
- type: int
- lastUpdateOn:
- description: Pnp Device's lastUpdateOn.
- type: int
- location:
- description: Pnp Device's location.
- type: dict
- suboptions:
- address:
- description: Pnp Device's address.
- type: str
- altitude:
- description: Pnp Device's altitude.
- type: str
- latitude:
- description: Pnp Device's latitude.
- type: str
- longitude:
- description: Pnp Device's longitude.
- type: str
- siteId:
- description: Pnp Device's siteId.
- type: str
- macAddress:
- description: Pnp Device's macAddress.
- type: str
- mode:
- description: Pnp Device's mode.
- type: str
- name:
- description: Pnp Device's name.
- type: str
- neighborLinks:
- description: Pnp Device's neighborLinks.
- type: list
- elements: dict
- suboptions:
- localInterfaceName:
- description: Pnp Device's localInterfaceName.
- type: str
- localMacAddress:
- description: Pnp Device's localMacAddress.
- type: str
- localShortInterfaceName:
- description: Pnp Device's localShortInterfaceName.
- type: str
- remoteDeviceName:
- description: Pnp Device's remoteDeviceName.
- type: str
- remoteInterfaceName:
- description: Pnp Device's remoteInterfaceName.
- type: str
- remoteMacAddress:
- description: Pnp Device's remoteMacAddress.
- type: str
- remotePlatform:
- description: Pnp Device's remotePlatform.
- type: str
- remoteShortInterfaceName:
- description: Pnp Device's remoteShortInterfaceName.
- type: str
- remoteVersion:
- description: Pnp Device's remoteVersion.
- type: str
- onbState:
- description: Pnp Device's onbState.
+ state:
+ description:
+ - Represents the onboarding state of the PnP device.
+ - Possible values are 'Unclaimed', 'Claimed', or 'Provisioned'.
type: str
pid:
description: Pnp Device's pid.
type: str
- pnpProfileList:
- description: Pnp Device's pnpProfileList.
- type: list
- elements: dict
- suboptions:
- createdBy:
- description: Pnp Device's createdBy.
- type: str
- discoveryCreated:
- description: DiscoveryCreated flag.
- type: bool
- primaryEndpoint:
- description: Pnp Device's primaryEndpoint.
- type: dict
- suboptions:
- certificate:
- description: Pnp Device's certificate.
- type: str
- fqdn:
- description: Pnp Device's fqdn.
- type: str
- ipv4Address:
- description: Pnp Device's ipv4Address.
- type: dict
- ipv6Address:
- description: Pnp Device's ipv6Address.
- type: dict
- port:
- description: Pnp Device's port.
- type: int
- protocol:
- description: Pnp Device's protocol.
- type: str
- profileName:
- description: Pnp Device's profileName.
- type: str
- secondaryEndpoint:
- description: Pnp Device's secondaryEndpoint.
- type: dict
- suboptions:
- certificate:
- description: Pnp Device's certificate.
- type: str
- fqdn:
- description: Pnp Device's fqdn.
- type: str
- ipv4Address:
- description: Pnp Device's ipv4Address.
- type: dict
- ipv6Address:
- description: Pnp Device's ipv6Address.
- type: dict
- port:
- description: Pnp Device's port.
- type: int
- protocol:
- description: Pnp Device's protocol.
- type: str
- populateInventory:
- description: PopulateInventory flag.
- type: bool
- preWorkflowCliOuputs:
- description: Pnp Device's preWorkflowCliOuputs.
- type: list
- elements: dict
- suboptions:
- cli:
- description: Pnp Device's cli.
- type: str
- cliOutput:
- description: Pnp Device's cliOutput.
- type: str
- projectId:
- description: Pnp Device's projectId.
- type: str
- projectName:
- description: Pnp Device's projectName.
+ serial_number:
+ description: Pnp Device's serial_number.
type: str
- reloadRequested:
- description: ReloadRequested flag.
+ is_sudi_required:
+ description: Sudi Authentication requirement's flag.
type: bool
- serialNumber:
- description: Pnp Device's serialNumber.
- type: str
- smartAccountId:
- description: Pnp Device's smartAccountId.
- type: str
- source:
- description: Pnp Device's source.
- type: str
- stack:
- description: Stack flag.
- type: bool
- stackInfo:
- description: Pnp Device's stackInfo.
- type: dict
- suboptions:
- isFullRing:
- description: IsFullRing flag.
- type: bool
- stackMemberList:
- description: Pnp Device's stackMemberList.
- type: list
- elements: dict
- suboptions:
- hardwareVersion:
- description: Pnp Device's hardwareVersion.
- type: str
- licenseLevel:
- description: Pnp Device's licenseLevel.
- type: str
- licenseType:
- description: Pnp Device's licenseType.
- type: str
- macAddress:
- description: Pnp Device's macAddress.
- type: str
- pid:
- description: Pnp Device's pid.
- type: str
- priority:
- description: Pnp Device's priority.
- type: int
- role:
- description: Pnp Device's role.
- type: str
- serialNumber:
- description: Pnp Device's serialNumber.
- type: str
- softwareVersion:
- description: Pnp Device's softwareVersion.
- type: str
- stackNumber:
- description: Pnp Device's stackNumber.
- type: int
- state:
- description: Pnp Device's state.
- type: str
- sudiSerialNumber:
- description: Pnp Device's sudiSerialNumber.
- type: str
- stackRingProtocol:
- description: Pnp Device's stackRingProtocol.
- type: str
- supportsStackWorkflows:
- description: SupportsStackWorkflows flag.
- type: bool
- totalMemberCount:
- description: Pnp Device's totalMemberCount.
- type: int
- validLicenseLevels:
- description: Pnp Device's validLicenseLevels.
- type: str
- state:
- description: Pnp Device's state.
- type: str
- sudiRequired:
- description: SudiRequired flag.
- type: bool
- tags:
- description: Pnp Device's tags.
- type: dict
- userSudiSerialNos:
- description: Pnp Device's userSudiSerialNos.
- elements: str
- type: list
- virtualAccountId:
- description: Pnp Device's virtualAccountId.
- type: str
- workflowId:
- description: Pnp Device's workflowId.
- type: str
- workflowName:
- description: Pnp Device's workflowName.
- type: str
-
+ site_name:
+ description: Name of the site for which device will be claimed.
+ type: str
+ project_name:
+ description: Name of the project under which the template is present
+ type: str
+ default: Onboarding Configuration
+ template_name:
+ description:
+ - Name of template to be configured on the device.
+ - Supported for EWLC from Cisco Catalyst Center release version
+ 2.3.7.x onwards.
+ type: str
+ template_params:
+ description:
+ - Parameter values for the parameterised templates.
+ - Each variable has a value that needs to be passed as key-value pair
+ in the dictionary. We can pass values as
+ variable_name:variable_value.
+ - Supported for EWLC from Cisco Catalyst Center release version
+ 2.3.7.x onwards.
+ type: dict
+ image_name:
+ description: Name of image to be configured on the device
+ type: str
+ golden_image:
+ description: Is the image to be configured tagged as golden image
+ type: bool
+ pnp_type:
+ description: Specifies the device type for the Plug and Play (PnP) device. -
+ Options include 'Default', 'CatalystWLC', 'AccessPoint', or
+ 'StackSwitch'. - 'Default' is applicable to switches and routers. -
+ 'CatalystWLC' should be selected for 9800 series wireless controllers.
+ - 'AccessPoint' is used when claiming an access point. - 'StackSwitch'
+ should be chosen for a group of switches that operate as a single
+ switch, typically used in the access layer.
+ type: str
+ choices:
+ - Default
+ - CatalystWLC
+ - AccessPoint
+ - StackSwitch
+ default: Default
+ static_ip:
+ description: Management IP address of the Wireless Controller
+ type: str
+ subnet_mask:
+ description: Subnet Mask of the Management IP address of the Wireless Controller
+ type: str
+ gateway:
+ description: Gateway IP address of the Wireless Controller for getting pinged
+ type: str
+ vlan_id:
+ description: Vlan Id allocated for claiming of Wireless Controller
+ type: str
+ ip_interface_name:
+ description: Specifies the interface name utilized for Plug and Play (PnP) by
+ the Wireless Controller. Ensure this interface is pre-configured on
+ the Controller prior to device claiming.
+ type: str
+ rf_profile:
+ description:
+ - Radio Frequency (RF) profile of the AP being claimed.
+ - RF Profiles allow you to tune groups of APs that share a common
+ coverage zone together.
+ - They selectively change how Radio Resource Management will operate
+ the APs within that coverage zone.
+ - HIGH RF profile allows you to use more power and allows to join AP
+ with the client in an easier fashion.
+ - TYPICAL RF profile is a blend of moderate power and moderate
+ visibility to the client.
+ - LOW RF profile allows you to consume lesser power and has least
+ visibility to the client.
+ type: str
+ choices:
+ - HIGH
+ - LOW
+ - TYPICAL
requirements:
-- dnacentersdk == 2.4.5
-- python >= 3.5
+ - dnacentersdk == 2.6.10
+ - python >= 3.5
notes:
- - SDK Method used are
- device_onboarding_pnp.DeviceOnboardingPnp.add_device,
+ - SDK Method used are device_onboarding_pnp.DeviceOnboardingPnp.add_device,
+ device_onboarding_pnp.DeviceOnboardingPnp.get_device_list,
device_onboarding_pnp.DeviceOnboardingPnp.claim_a_device_to_a_site,
device_onboarding_pnp.DeviceOnboardingPnp.delete_device_by_id_from_pnp,
-
- - Paths used are
- post /dna/intent/api/v1/onboarding/pnp-device
- post /dna/intent/api/v1/onboarding/pnp-device/site-claim
- post /dna/intent/api/v1/onboarding/pnp-device/{id}
+ device_onboarding_pnp.DeviceOnboardingPnp.get_device_count,
+ device_onboarding_pnp.DeviceOnboardingPnp.get_device_by_id,
+ device_onboarding_pnp.DeviceOnboardingPnp.update_device,
+ sites.Sites.get_site,
+ software_image_management_swim.SoftwareImageManagementSwim.get_software_image_details,
+ configuration_templates.ConfigurationTemplates.gets_the_templates_available
+ - Paths used are post /dna/intent/api/v1/onboarding/pnp-device post
+ /dna/intent/api/v1/onboarding/pnp-device/site-claim post
+ /dna/intent/api/v1/onboarding/pnp-device/{id} get
+ /dna/intent/api/v1/onboarding/pnp-device/count get
+ /dna/intent/api/v1/onboarding/pnp-device put /onboarding/pnp-device/${id}
+ get /dna/intent/api/v1/site get /dna/intent/api/v1/image/importation get
+ /dna/intent/api/v1/template-programmer/template
"""
EXAMPLES = r"""
-- name: Add a new device and claim the device
+- name: Import multiple switches in bulk only
+ cisco.dnac.pnp_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ state: merged
+ config_verify: True
+ config:
+ - device_info:
+ - serial_number: QD2425L8M7
+ state: Unclaimed
+ pid: c9300-24P
+ is_sudi_required: False
+ - serial_number: QTC2320E0H9
+ state: Unclaimed
+ pid: c9300-24P
+ hostname: Test-123
+ - serial_number: ETC2320E0HB
+ state: Unclaimed
+ pid: c9300-24P
+
+- name: Add a new EWLC and claim it
+ cisco.dnac.pnp_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ state: merged
+ config_verify: True
+ config:
+ - device_info:
+ - serial_number: FOX2639PAY7
+ hostname: New_WLC
+ state: Unclaimed
+ pid: C9800-CL-K9
+ site_name: Global/USA/San Francisco/BGL_18
+ template_name: Ansible_PNP_WLC
+ template_params:
+ hostname: IAC-EWLC-Claimed
+ project_name: Onboarding Configuration
+ image_name: C9800-40-universalk9_wlc.17.12.01.SPA.bin
+ golden_image: true
+ pnp_type: CatalystWLC
+ static_ip: 204.192.101.10
+ subnet_mask: 255.255.255.0
+ gateway: 204.192.101.1
+ vlan_id: 1101
+ ip_interface_name: TenGigabitEthernet0/0/0
+
+- name: Claim a pre-added switch, apply a template, and perform an image upgrade for a specific site
cisco.dnac.pnp_intent:
dnac_host: "{{dnac_host}}"
dnac_username: "{{dnac_username}}"
@@ -442,138 +250,48 @@ EXAMPLES = r"""
dnac_port: "{{dnac_port}}"
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
dnac_log: True
state: merged
+ config_verify: True
config:
- template_name: string
- image_name: string
- site_name: string
- deviceInfo:
- aaaCredentials:
- password: string
- username: string
- addedOn: 0
- addnMacAddrs:
- - string
- agentType: string
- authStatus: string
- authenticatedSudiSerialNo: string
- capabilitiesSupported:
- - string
- cmState: string
- description: string
- deviceSudiSerialNos:
- - string
- deviceType: string
- featuresSupported:
- - string
- fileSystemList:
- - freespace: 0
- name: string
- readable: true
- size: 0
- type: string
- writeable: true
- firstContact: 0
- hostname: string
- httpHeaders:
- - key: string
- value: string
- imageFile: string
- imageVersion: string
- ipInterfaces:
- - ipv4Address: {}
- ipv6AddressList:
- - {}
- macAddress: string
- name: string
- status: string
- lastContact: 0
- lastSyncTime: 0
- lastUpdateOn: 0
- location:
- address: string
- altitude: string
- latitude: string
- longitude: string
- siteId: string
- macAddress: string
- mode: string
- name: string
- neighborLinks:
- - localInterfaceName: string
- localMacAddress: string
- localShortInterfaceName: string
- remoteDeviceName: string
- remoteInterfaceName: string
- remoteMacAddress: string
- remotePlatform: string
- remoteShortInterfaceName: string
- remoteVersion: string
- onbState: string
- pid: string
- pnpProfileList:
- - createdBy: string
- discoveryCreated: true
- primaryEndpoint:
- certificate: string
- fqdn: string
- ipv4Address: {}
- ipv6Address: {}
- port: 0
- protocol: string
- profileName: string
- secondaryEndpoint:
- certificate: string
- fqdn: string
- ipv4Address: {}
- ipv6Address: {}
- port: 0
- protocol: string
- populateInventory: true
- preWorkflowCliOuputs:
- - cli: string
- cliOutput: string
- projectId: string
- projectName: string
- reloadRequested: true
- serialNumber: string
- smartAccountId: string
- source: string
- stack: true
- stackInfo:
- isFullRing: true
- stackMemberList:
- - hardwareVersion: string
- licenseLevel: string
- licenseType: string
- macAddress: string
- pid: string
- priority: 0
- role: string
- serialNumber: string
- softwareVersion: string
- stackNumber: 0
- state: string
- sudiSerialNumber: string
- stackRingProtocol: string
- supportsStackWorkflows: true
- totalMemberCount: 0
- validLicenseLevels: string
- state: string
- sudiRequired: true
- tags: {}
- userSudiSerialNos:
- - string
- virtualAccountId: string
- workflowId: string
- workflowName: string
+ - device_info:
+ - serial_number: FJC271924EQ
+ hostname: Switch
+ state: Unclaimed
+ pid: C9300-48UXM
+ site_name: Global/USA/San Francisco/BGL_18
+ template_name: "Ansible_PNP_Switch"
+ image_name: cat9k_iosxe_npe.17.03.07.SPA.bin
+ project_name: Onboarding Configuration
+ template_params:
+ hostname: SJC-Switch-1
+ interface: TwoGigabitEthernet1/0/2
+
+- name: Remove multiple devices from the PnP dashboard safely (ignores non-existent devices)
+ cisco.dnac.pnp_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ state: deleted
+ config_verify: True
+ config:
+ - device_info:
+ - serial_number: QD2425L8M7
+ - serial_number: FTC2320E0HA
+ - serial_number: FKC2310E0HB
"""
RETURN = r"""
#Case_1: When the device is claimed successfully.
response_1:
- description: A dictionary with the response returned by the Cisco DNAC Python SDK
+ description: A dictionary with the response returned by the Cisco Catalyst Center Python SDK
returned: always
type: dict
sample: >
@@ -588,7 +306,7 @@ response_1:
#Case_2: Given site/image/template/project not found or Device is not found for deletion
response_2:
- description: A list with the response returned by the Cisco DNAC Python SDK
+ description: A list with the response returned by the Cisco Catalyst Center Python SDK
returned: always
type: list
sample: >
@@ -599,7 +317,7 @@ response_2:
#Case_3: Error while deleting/claiming a device
response_3:
- description: A string with the response returned by the Cisco DNAC Python SDK
+ description: A string with the response returned by the Cisco Catalyst Center Python SDK
returned: always
type: dict
sample: >
@@ -608,347 +326,975 @@ response_3:
"msg": String
}
"""
-
-import copy
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
- DNACSDK,
+ DnacBase,
validate_list_of_dicts,
- log,
- get_dict_result,
+ get_dict_result
)
-class DnacPnp:
+class PnP(DnacBase):
def __init__(self, module):
- self.module = module
- self.params = module.params
- self.config = copy.deepcopy(module.params.get("config"))
- self.have = []
- self.want = []
- self.diff = []
- self.validated = []
- dnac_params = self.get_dnac_params(self.params)
- log(str(dnac_params))
- self.dnac = DNACSDK(params=dnac_params)
- self.log = dnac_params.get("dnac_log")
-
- self.result = dict(changed=False, diff=[], response=[], warnings=[])
-
- def get_state(self):
- return self.params.get("state")
+ super().__init__(module)
def validate_input(self):
- pnp_spec = dict(
- template_name=dict(required=True, type='str'),
- project_name=dict(required=False, type='str', default="Onboarding Configuration"),
- site_name=dict(required=True, type='str'),
- image_name=dict(required=True, type='str'),
- golden_image=dict(required=False, type='bool'),
- deviceInfo=dict(required=True, type='dict'),
- pnp_type=dict(required=False, type=str, default="Default")
- )
-
- if self.config:
- msg = None
-
- # Validate template params
- if self.log:
- log(str(self.config))
- valid_pnp, invalid_params = validate_list_of_dicts(
- self.config, pnp_spec
- )
-
- if invalid_params:
- msg = "Invalid parameters in playbook: {0}".format(
- "\n".join(invalid_params)
- )
- self.module.fail_json(msg=msg)
-
- self.validated = valid_pnp
-
- if self.log:
- log(str(valid_pnp))
- log(str(self.validated))
-
- def get_dnac_params(self, params):
- dnac_params = dict(
- dnac_host=params.get("dnac_host"),
- dnac_port=params.get("dnac_port"),
- dnac_username=params.get("dnac_username"),
- dnac_password=params.get("dnac_password"),
- dnac_verify=params.get("dnac_verify"),
- dnac_debug=params.get("dnac_debug"),
- dnac_log=params.get("dnac_log")
+ """
+ Validate the fields provided in the playbook. Checks the
+ configuration provided in the playbook against a predefined
+ specification to ensure it adheres to the expected structure
+ and data types.
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of the
+ 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call
+ 'validate_input' on it.If the validation succeeds, 'self.status'
+ will be 'success'and 'self.validated_config' will contain the
+ validated configuration. If it fails, 'self.status' will be
+ 'failed', and 'self.msg' will describe the validation issues.
+ """
+
+ if not self.config:
+ self.msg = "config not available in playbook for validation"
+ self.status = "success"
+ return self
+
+ pnp_spec = {
+ 'template_name': {'type': 'str', 'required': False},
+ 'template_params': {'type': 'dict', 'required': False},
+ 'project_name': {'type': 'str', 'required': False,
+ 'default': 'Onboarding Configuration'},
+ 'site_name': {'type': 'str', 'required': False},
+ 'image_name': {'type': 'str', 'required': False},
+ 'golden_image': {'type': 'bool', 'required': False},
+ 'device_info': {'type': 'list', 'required': True,
+ 'elements': 'dict'},
+ 'pnp_type': {'type': 'str', 'required': False, 'default': 'Default'},
+ "rf_profile": {'type': 'str', 'required': False},
+ "static_ip": {'type': 'str', 'required': False},
+ "subnet_mask": {'type': 'str', 'required': False},
+ "gateway": {'type': 'str', 'required': False},
+ "vlan_id": {'type': 'str', 'required': False},
+ "ip_interface_name": {'type': 'str', 'required': False},
+ "sensorProfile": {'type': 'str', 'required': False}
+ }
+
+ # Validate pnp params
+ valid_pnp, invalid_params = validate_list_of_dicts(
+ self.config, pnp_spec
)
- return dnac_params
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format(
+ "\n".join(invalid_params))
+ self.log(str(self.msg), "ERROR")
+ self.status = "failed"
+ return self
+ self.validated_config = valid_pnp
+ self.msg = "Successfully validated playbook config params: {0}".format(str(valid_pnp))
+ self.log(str(self.msg), "INFO")
+ self.status = "success"
+
+ return self
+
+ def get_site_details(self):
+ """
+        Check whether the site exists or not, along with site id
+
+ Parameters:
+ - self: The instance of the class containing the 'config'
+ attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - site_exits: A boolean value indicating the existence of the site.
+ - site_id: The Id of the site i.e. required to claim device to site.
+ Example:
+        Post creation of the validated input, this method gets the
+        site_id and checks whether the site exists or not
+ """
- def site_exists(self):
site_exists = False
site_id = None
response = None
+
try:
- response = self.dnac._exec(
+ response = self.dnac_apply['exec'](
family="sites",
function='get_site',
params={"name": self.want.get("site_name")},
)
- except Exception as e:
+ except Exception:
+ self.log("Exception occurred as site \
+ '{0}' was not found".format(self.want.get("site_name")), "CRITICAL")
self.module.fail_json(msg="Site not found", response=[])
if response:
- if self.log:
- log(str(response))
-
+ self.log("Received site details \
+ for '{0}': {1}".format(self.want.get("site_name"), str(response)), "DEBUG")
site = response.get("response")
- site_id = site[0].get("id")
- site_exists = True
+ if len(site) == 1:
+ site_id = site[0].get("id")
+ site_exists = True
+ self.log("Site Name: {1}, Site ID: {0}".format(site_id, self.want.get("site_name")), "INFO")
return (site_exists, site_id)
+ def get_site_type(self):
+ """
+ Fetches the type of site
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - site_type: A string indicating the type of the
+ site (area/building/floor).
+ Example:
+        Post creation of the validated input, this method gets the
+        type of the site.
+ """
+
+ try:
+ response = self.dnac_apply['exec'](
+ family="sites",
+ function='get_site',
+ params={"name": self.want.get("site_name")},
+ )
+ except Exception:
+ self.log("Exception occurred as \
+ site '{0}' was not found".format(self.want.get("site_name")), "CRITICAL")
+ self.module.fail_json(msg="Site not found", response=[])
+
+ if response:
+ self.log("Received site details\
+ for '{0}': {1}".format(self.want.get("site_name"), str(response)), "DEBUG")
+ site = response.get("response")
+ site_additional_info = site[0].get("additionalInfo")
+ for item in site_additional_info:
+ if item["nameSpace"] == "Location":
+ site_type = item.get("attributes").get("type")
+ self.log("Site type for site name '{1}' : {0}".format(site_type, self.want.get("site_name")), "INFO")
+
+ return site_type
+
def get_pnp_params(self, params):
- pnp_params = {}
- pnp_params['_id'] = params.get('_id')
- pnp_params['deviceInfo'] = params.get('deviceInfo')
- pnp_params['runSummaryList'] = params.get('runSummaryList')
- pnp_params['systemResetWorkflow'] = params.get('systemResetWorkflow')
- pnp_params['systemWorkflow'] = params.get('systemWorkflow')
- pnp_params['tenantId'] = params.get('tenantId')
- pnp_params['version'] = params.get('device_version')
- pnp_params['workflow'] = params.get('workflow')
- pnp_params['workflowParameters'] = params.get('workflowParameters')
-
- return pnp_params
+ """
+ Store pnp parameters from the playbook for pnp processing in Cisco Catalyst Center.
+
+ Parameters:
+ - self: The instance of the class containing the 'config'
+ attribute to be validated.
+ - params: The validated params passed from the playbook.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+        - pnp_params: A list of dictionaries containing the device
+          information required for PnP processing.
+ Example:
+        Post creation of the validated input, it fetches the required parameters
+ and stores it for further processing and calling the parameters in
+ other APIs.
+ """
+
+ params_list = params["device_info"]
+ device_info_list = []
+ for param in params_list:
+ device_dict = {}
+ param["serialNumber"] = param.pop("serial_number")
+ if "is_sudi_required" in param:
+ param["isSudiRequired"] = param.pop("is_sudi_required")
+ device_dict["deviceInfo"] = param
+ device_info_list.append(device_dict)
+
+ self.log("PnP paramters passed are {0}".format(str(params_list)), "INFO")
+ return device_info_list
def get_image_params(self, params):
- image_params = dict(
- image_name=params.get("image_name"),
- is_tagged_golden=params.get("golden_image"),
- )
-
+ """
+ Get image name and the confirmation whether it's tagged golden or not
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ - params: The validated params passed from the playbook.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - image_params: A dictionary containing all the values indicating
+ name of the image and its golden image status.
+ Example:
+ Post creation of the validated input, it fetches the required
+        parameters and stores it for further processing and calling the
+ parameters in other APIs.
+ """
+
+ image_params = {
+ 'image_name': params.get('image_name'),
+ 'is_tagged_golden': params.get('golden_image')
+ }
+
+ self.log("Image details are {0}".format(str(image_params)), "INFO")
return image_params
- def get_claim_params(self):
- imageinfo = dict(
- imageId=self.have.get("image_id")
- )
- configinfo = dict(
- configId=self.have.get("template_id"),
- configParameters=[dict(
- key="",
- value=""
- )]
- )
- claim_params = dict(
- deviceId=self.have.get("device_id"),
- siteId=self.have.get("site_id"),
- type=self.want.get("pnp_type"),
- hostname=self.want.get("hostname"),
- imageInfo=imageinfo,
- configInfo=configinfo,
- )
+ def pnp_cred_failure(self, msg=None):
+ """
+ Method for failing discovery if there is any discrepancy in the PnP credentials
+ passed by the user
+ """
+
+ self.log(msg, "CRITICAL")
+ self.module.fail_json(msg=msg)
+ def get_claim_params(self):
+ """
+        Get the parameters needed for claiming the device to site.
+ Parameters:
+ - self: The instance of the class containing the 'config'
+ attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - claim_params: A dictionary needed for calling the POST call
+ for claim a device to a site API.
+ Example:
+ The stored dictionary can be used to call the API claim a device
+ to a site via SDK
+ """
+
+ imageinfo = {
+ 'imageId': self.have.get('image_id')
+ }
+ template_params = self.validated_config[0].get("template_params")
+ configinfo = {
+ 'configId': self.have.get('template_id'),
+ 'configParameters': [
+ {
+ 'key': '',
+ 'value': ''
+ }
+ ]
+ }
+
+ if configinfo.get("configId") and template_params:
+ if isinstance(template_params, dict):
+ if len(template_params) > 0:
+ configinfo["configParameters"] = []
+ for key, value in template_params.items():
+ config_dict = {
+ 'key': key,
+ 'value': value
+ }
+ configinfo["configParameters"].append(config_dict)
+
+ claim_params = {
+ 'deviceId': self.have.get('device_id'),
+ 'siteId': self.have.get('site_id'),
+ 'type': self.want.get('pnp_type'),
+ 'hostname': self.want.get('hostname'),
+ 'imageInfo': imageinfo,
+ 'configInfo': configinfo,
+ }
+
+ if claim_params["type"] == "CatalystWLC":
+ if not (self.validated_config[0].get('static_ip')):
+ msg = "A static IP address is required to claim a wireless controller. Please provide one."
+ self.pnp_cred_failure(msg=msg)
+ if not (self.validated_config[0].get('subnet_mask')):
+ msg = "Please provide a subnet mask to claim a wireless controller. "\
+ "This information is mandatory for the configuration."
+ self.pnp_cred_failure(msg=msg)
+ if not (self.validated_config[0].get('gateway')):
+ msg = "A gateway IP is required to claim a wireless controller. Please ensure to provide it."
+ self.pnp_cred_failure(msg=msg)
+ if not (self.validated_config[0].get('ip_interface_name')):
+ msg = "Please provide the Interface Name to claim a wireless controller. This information is necessary"\
+ " for making it a logical interface post claiming which can used to help manage the Wireless SSIDs "\
+ "broadcasted by the access points, manage the controller, access point and user data, plus more."
+ self.pnp_cred_failure(msg=msg)
+ if not (self.validated_config[0].get('vlan_id')):
+ msg = "Please provide the Vlan ID to claim a wireless controller. This is a required field for the process"\
+ " to create and set the specified port as trunk during PnP."
+ self.pnp_cred_failure(msg=msg)
+ claim_params["staticIP"] = self.validated_config[0]['static_ip']
+ claim_params["subnetMask"] = self.validated_config[0]['subnet_mask']
+ claim_params["gateway"] = self.validated_config[0]['gateway']
+ claim_params["vlanId"] = str(self.validated_config[0].get('vlan_id'))
+ claim_params["ipInterfaceName"] = self.validated_config[0]['ip_interface_name']
+
+ if claim_params["type"] == "AccessPoint":
+ if not (self.validated_config[0].get("rf_profile")):
+ msg = "The RF Profile for claiming an AP must be passed"
+ self.pnp_cred_failure(msg=msg)
+ claim_params["rfProfile"] = self.validated_config[0]["rf_profile"]
+
+ self.log("Paramters used for claiming are {0}".format(str(claim_params)), "INFO")
return claim_params
+ def get_reset_params(self):
+ """
+        Get the parameters needed for resetting the device in an errored state.
+ Parameters:
+ - self: The instance of the class containing the 'config'
+ attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - reset_params: A dictionary needed for calling the PUT call
+ for update device details API.
+ Example:
+ The stored dictionary can be used to call the API update device details
+ """
+
+ reset_params = {
+ "deviceResetList": [
+ {
+ "configList": [
+ {
+ "configId": self.have.get('template_id'),
+ "configParameters": [
+ {
+ "key": "",
+ "value": ""
+ }
+ ]
+ }
+ ],
+ "deviceId": self.have.get('device_id'),
+ "licenseLevel": "",
+ "licenseType": "",
+ "topOfStackSerialNumber": ""
+ }
+ ]
+ }
+
+ self.log("Paramters used for resetting from errored state:{0}".format(str(reset_params)), "INFO")
+ return reset_params
+
def get_have(self):
+ """
+ Get the current image, template and site details from the Cisco Catalyst Center.
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.image_response: A list of image passed by the user
+ - self.template_list: A list of template under project
+ - self.device_response: Gets the device_id and stores it
+ Example:
+ Stored paramters are used to call the APIs to get the current image,
+ template and site details to call the API for various types of devices
+ """
have = {}
- if self.params.get("state") == "merged":
- # check if given image exists, if exists store image_id
- image_response = self.dnac._exec(
- family="software_image_management_swim",
- function='get_software_image_details',
- params=self.want.get("image_params"),
+ # Claiming is only allowed for single addition of devices
+ if len(self.want.get('pnp_params')) == 1:
+ # check if given device exists in pnp inventory, store device Id
+ device_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='get_device_list',
+ params={"serial_number": self.want.get("serial_number")}
)
+ self.log("Device details for the device with serial \
+ number '{0}': {1}".format(self.want.get("serial_number"), str(device_response)), "DEBUG")
- if self.log:
- log(str(image_response))
+ if not (device_response and (len(device_response) == 1)):
+ self.log("Device with serial number {0} is not found in the inventory".format(self.want.get("serial_number")), "WARNING")
+ self.msg = "Adding the device to database"
+ self.status = "success"
+ self.have = have
+ have["device_found"] = False
+ return self
+
+ have["device_found"] = True
+ have["device_id"] = device_response[0].get("id")
+ self.log("Device Id: " + str(have["device_id"]))
+
+ if self.params.get("state") == "merged":
+ # check if given image exists, if exists store image_id
+ image_response = self.dnac_apply['exec'](
+ family="software_image_management_swim",
+ function='get_software_image_details',
+ params=self.want.get("image_params"),
+ )
+ image_list = image_response.get("response")
+ self.log("Image details obtained from the API 'get_software_image_details': {0}".format(str(image_response)), "DEBUG")
+
+ # check if project has templates or not
+ template_list = self.dnac_apply['exec'](
+ family="configuration_templates",
+ function='gets_the_templates_available',
+ params={"project_names": self.want.get("project_name")},
+ )
+ self.log("List of templates under the project '{0}': {1}".format(self.want.get("project_name"), str(template_list)), "DEBUG")
- image_list = image_response.get("response")
+ dev_details_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function="get_device_by_id",
+ params={"id": device_response[0].get("id")}
+ )
+ self.log("Device details retrieved after calling the 'get_device_by_id' API: {0}".format(str(dev_details_response)), "DEBUG")
+ install_mode = dev_details_response.get("deviceInfo").get("mode")
+ self.log("Installation mode of the device with the serial no. '{0}':{1}".format(self.want.get("serial_number"), install_mode), "INFO")
+
+ # check if given site exits, if exists store current site info
+ site_exists = False
+ if not isinstance(self.want.get("site_name"), str) and \
+ not self.want.get('pnp_params')[0].get('deviceInfo'):
+ self.msg = "The site name must be a string"
+ self.log(str(self.msg), "ERROR")
+ self.status = "failed"
+ return self
+
+ site_name = self.want.get("site_name")
+ (site_exists, site_id) = self.get_site_details()
+
+ if site_exists:
+ have["site_id"] = site_id
+ self.log("Site Exists: {0}\nSite Name: {1}\nSite ID: {2}".format(site_exists, site_name, site_id), "INFO")
+ if self.want.get("pnp_type") == "AccessPoint":
+ if self.get_site_type() != "floor":
+ self.msg = "Please ensure that the site type is specified as 'floor' when claiming an AP."\
+ " The site type is given as '{0}'. Please change the 'site_type' into 'floor' to "\
+ "proceed.".format(self.get_site_type())
+ self.log(str(self.msg), "ERROR")
+ self.status = "failed"
+ return self
+
+ if len(image_list) == 0:
+ self.msg = "The image '{0}' is either not present or not tagged as 'Golden' in the Cisco Catalyst Center."\
+ " Please verify its existence and its tag status.".format(self.validated_config[0].get("image_name"))
+ self.log(self.msg, "CRITICAL")
+ self.status = "failed"
+ return self
+
+ if len(image_list) == 1:
+ if install_mode != "INSTALL":
+ self.msg = "The system must be in INSTALL mode to upgrade the image. The current mode is '{0}'."\
+ " Please switch to INSTALL mode to proceed.".format(install_mode)
+ self.log(str(self.msg), "CRITICAL")
+ self.status = "failed"
+ return self
+
+ have["image_id"] = image_list[0].get("imageUuid")
+ self.log("Image ID for the image '{0}': {1}".format(self.want.get('image_params').get('image_name'), str(have["image_id"])), "INFO")
+
+ template_name = self.want.get("template_name")
+ if template_name:
+ if not (template_list and isinstance(template_list, list)):
+ self.msg = "Either project not found"\
+ " or it is Empty."
+ self.log(self.msg, "CRITICAL")
+ self.status = "failed"
+ return self
+
+ template_details = get_dict_result(template_list, 'name', template_name)
+ if template_details:
+ have["template_id"] = template_details.get("templateId")
+ else:
+ self.msg = "Template '{0}' is not found.".format(template_name)
+ self.log(self.msg, "CRITICAL")
+ self.status = "failed"
+ return self
- if len(image_list) == 1:
- have["image_id"] = image_list[0].get("imageUuid")
- if self.log:
- log("Image Id: " + str(have["image_id"]))
- else:
- self.module.fail_json(msg="Image not found", response=[])
+ else:
+ if not self.want.get('pnp_params')[0].get('deviceInfo'):
+ self.msg = "Either Site Name or Device details must be added."
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ self.msg = "Successfully collected all project and template \
+ parameters from Cisco Catalyst Center for comparison"
+ self.log(self.msg, "INFO")
+ self.status = "success"
+ self.have = have
+ return self
+
+ def get_want(self, config):
+ """
+ Get all the image, template and site and pnp related
+ information from playbook that is needed to be created in Cisco Catalyst Center.
+
+ Parameters:
+ - self: The instance of the class containing the 'config'
+ attribute to be validated.
+ - config: validated config passed from the playbook
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.want: A dictionary of paramters obtained from the playbook.
+ - self.msg: A message indicating all the paramters from the playbook
+ are collected.
+ - self.status: Success.
+ Example:
+ It stores all the paramters passed from the playbook for further
+ processing before calling the APIs
+ """
+
+ self.want = {
+ 'image_params': self.get_image_params(config),
+ 'pnp_params': self.get_pnp_params(config),
+ 'pnp_type': config.get('pnp_type'),
+ 'site_name': config.get('site_name'),
+ 'project_name': config.get('project_name'),
+ 'template_name': config.get('template_name')
+ }
+ if len(self.want.get('pnp_params')) == 1:
+ self.want["serial_number"] = (
+ self.want['pnp_params'][0]["deviceInfo"].
+ get("serialNumber")
+ )
+ self.want["hostname"] = (
+ self.want['pnp_params'][0]["deviceInfo"].
+ get("hostname")
+ )
- # check if given template exists, if exists store template id
- template_list = self.dnac._exec(
- family="configuration_templates",
- function='gets_the_templates_available',
- params={"project_names": self.want.get("project_name")},
+ if self.want["pnp_type"] == "CatalystWLC":
+ self.want["static_ip"] = config.get('static_ip')
+ self.want["subnet_mask"] = config.get('subnet_mask')
+ self.want["gateway"] = config.get('gateway')
+ self.want["vlan_id"] = config.get('vlan_id')
+ self.want["ip_interface_name"] = config.get('ip_interface_name')
+
+ elif self.want["pnp_type"] == "AccessPoint":
+ self.want["rf_profile"] = config.get("rf_profile")
+ self.msg = "Successfully collected all parameters from playbook " + \
+ "for comparison"
+ self.log(self.msg, "INFO")
+ self.status = "success"
+
+ return self
+
+ def get_diff_merged(self):
+ """
+ If given device doesnot exist
+ then add it to pnp database and get the device id
+ Args:
+ self: An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ object: An instance of the class with updated results and status
+ based on the processing of differences. Based on the length of devices passed
+ it adds/claims or does both.
+ Description:
+ The function processes the differences and, depending on the
+ changes required, it may add, update,or resynchronize devices in
+ Cisco Catalyst Center. The updated results and status are stored in the
+ class instance for further use.
+ """
+
+ if not isinstance(self.want.get("pnp_params"), list):
+ self.msg = "Device Info must be passed as a list"
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ if len(self.want.get("pnp_params")) > 1:
+ devices_added = []
+ for device in self.want.get("pnp_params"):
+ multi_device_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='get_device_list',
+ params={"serial_number": device["deviceInfo"]["serialNumber"]}
+ )
+ self.log("Device details for serial number {0} \
+ obtained from the API 'get_device_list': {1}".format(device["deviceInfo"]["serialNumber"], str(multi_device_response)), "DEBUG")
+ if (multi_device_response and (len(multi_device_response) == 1)):
+ devices_added.append(device)
+ self.log("Details of the added device:{0}".format(str(device)), "INFO")
+ if (len(self.want.get("pnp_params")) - len(devices_added)) == 0:
+ self.result['response'] = []
+ self.result['msg'] = "Devices are already added"
+ self.log(self.result['msg'], "WARNING")
+ return self
+
+ bulk_list = [
+ device
+ for device in self.want.get("pnp_params")
+ if device not in devices_added
+ ]
+ bulk_params = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function="import_devices_in_bulk",
+ params={"payload": bulk_list},
+ op_modifies=True,
)
+ self.log("Response from API 'import_devices_in_bulk' for imported devices: {0}".format(bulk_params), "DEBUG")
+ if len(bulk_params.get("successList")) > 0:
+ self.result['msg'] = "{0} device(s) imported successfully".format(
+ len(bulk_params.get("successList")))
+ self.log(self.result['msg'], "INFO")
+ self.result['response'] = bulk_params
+ self.result['diff'] = self.validated_config
+ self.result['changed'] = True
+ return self
+
+ self.msg = "Bulk import failed"
+ self.log(self.msg, "CRITICAL")
+ self.status = "failed"
+ return self
+
+ provisioned_count_params = {
+ "serial_number": self.want.get("serial_number"),
+ "state": "Provisioned"
+ }
+
+ planned_count_params = {
+ "serial_number": self.want.get("serial_number"),
+ "state": "Planned"
+ }
- if self.log:
- log(str(template_list))
+ if not self.have.get("device_found"):
+ if not self.want['pnp_params']:
+ self.msg = "Device needs to be added before claiming. Please add device_info"
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ if not self.want["site_name"]:
+ self.log("Adding device to pnp database", "INFO")
+ dev_add_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function="add_device",
+ params=self.want.get('pnp_params')[0],
+ op_modifies=True,
+ )
- if template_list and isinstance(template_list, list):
- # API execution error returns a dict
- template_details = get_dict_result(template_list, 'name', self.want.get("template_name"))
- if template_details:
- have["template_id"] = template_details.get("templateId")
+ self.have["deviceInfo"] = dev_add_response.get("deviceInfo")
+ self.log("Response from API 'add device' for a single device addition: {0}".format(str(dev_add_response)), "DEBUG")
+ if self.have["deviceInfo"]:
+ self.result['msg'] = "Only Device Added Successfully"
+ self.log(self.result['msg'], "INFO")
+ self.result['response'] = dev_add_response
+ self.result['diff'] = self.validated_config
+ self.result['changed'] = True
- if self.log:
- log("Template Id: " + str(have["template_id"]))
else:
- self.module.fail_json(msg="Template not found", response=[])
+ self.msg = "Device Addition Failed"
+ self.log(self.result['msg'], "CRITICAL")
+ self.status = "failed"
+
+ return self
+
else:
- self.module.fail_json(msg="Project Not Found", response=[])
+ self.log("Adding device to pnp database")
+ dev_add_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function="add_device",
+ params=self.want.get("pnp_params")[0],
+ op_modifies=True,
+ )
+ self.get_have().check_return_status()
+ self.have["deviceInfo"] = dev_add_response.get("deviceInfo")
+ self.log("Response from API 'add device' for single device addition: {0}".format(str(dev_add_response)), "DEBUG")
+ claim_params = self.get_claim_params()
+ claim_params["deviceId"] = dev_add_response.get("id")
+ claim_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='claim_a_device_to_a_site',
+ op_modifies=True,
+ params=claim_params,
+ )
- # check if given site exits, if exists store current site info
- site_name = self.want.get("site_name")
+ self.log("Response from API 'claim a device to a site' for a single claiming: {0}".format(str(dev_add_response)), "DEBUG")
+ if claim_response.get("response") == "Device Claimed" and self.have["deviceInfo"]:
+ self.result['msg'] = "Device Added and Claimed Successfully"
+ self.log(self.result['msg'], "INFO")
+ self.result['response'] = claim_response
+ self.result['diff'] = self.validated_config
+ self.result['changed'] = True
- site_exists = False
- (site_exists, site_id) = self.site_exists()
+ else:
+ self.msg = "Device Claim Failed"
+ self.log(self.result['msg'], "CRITICAL")
+ self.status = "failed"
- if site_exists:
- have["site_id"] = site_id
- if self.log:
- log("Site Exists: " + str(site_exists) + "\n Site_id:" + str(site_id))
- log("Site Name:" + str(site_name))
+ return self
- # check if given device exists in pnp inventory, store device Id
- device_response = self.dnac._exec(
+ prov_dev_response = self.dnac_apply['exec'](
family="device_onboarding_pnp",
- function='get_device_list',
- params={"serial_number": self.want.get("serial_number")}
+ function='get_device_count',
+ op_modifies=True,
+ params=provisioned_count_params,
)
+ self.log("Response from 'get device count' API for provisioned devices: {0}".format(str(prov_dev_response)), "DEBUG")
- if self.log:
- log(str(device_response))
+ plan_dev_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='get_device_count',
+ op_modifies=True,
+ params=planned_count_params,
+ )
+ self.log("Response from 'get_device_count' API for devices in planned state: {0}".format(str(plan_dev_response)), "DEBUG")
- if device_response and (len(device_response) == 1):
- have["device_id"] = device_response[0].get("id")
- have["device_found"] = True
+ dev_details_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function="get_device_by_id",
+ params={"id": self.have["device_id"]}
+ )
+ self.log("Response from 'get_device_by_id' API for device details: {0}".format(str(dev_details_response)), "DEBUG")
- if self.log:
- log("Device Id: " + str(have["device_id"]))
- else:
- have["device_found"] = False
+ is_stack = False
+ if dev_details_response.get("deviceInfo").get("stack"):
+ is_stack = dev_details_response.get("deviceInfo").get("stack")
+ pnp_state = dev_details_response.get("deviceInfo").get("state")
+ self.log("PnP state of the device: {0}".format(pnp_state), "INFO")
- self.have = have
+ if not self.want["site_name"]:
+ self.result['response'] = self.have.get("device_found")
+ self.result['msg'] = "Device is already added"
+ self.log(self.result['msg'], "WARNING")
+ return self
- def get_want(self):
- for params in self.validated:
- want = dict(
- image_params=self.get_image_params(params),
- pnp_params=self.get_pnp_params(params),
- pnp_type=params.get("pnp_type"),
- site_name=params.get("site_name"),
- serial_number=params.get("deviceInfo").get("serialNumber"),
- hostname=params.get("deviceInfo").get("hostname"),
- project_name=params.get("project_name"),
- template_name=params.get("template_name")
- )
-
- self.want = want
+ update_payload = {"deviceInfo": self.want.get('pnp_params')[0].get("deviceInfo")}
+ update_payload["deviceInfo"]["stack"] = is_stack
- def get_diff_merge(self):
+ self.log("The request sent for 'update_device' API for device's config update: {0}".format(update_payload), "DEBUG")
+ update_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function="update_device",
+ params={"id": self.have["device_id"],
+ "payload": update_payload},
+ op_modifies=True,
+ )
+ self.log("Response from 'update_device' API for device's config update: {0}".format(str(update_response)), "DEBUG")
- # if given device doesnot exist then add it to pnp database and get the device id
- if not self.have.get("device_found"):
- log("Adding device to pnp database")
- response = self.dnac._exec(
+ if pnp_state == "Error":
+ reset_paramters = self.get_reset_params()
+ reset_response = self.dnac_apply['exec'](
family="device_onboarding_pnp",
- function="add_device",
- params=self.want.get("pnp_params"),
+ function="reset_device",
+ params={"payload": reset_paramters},
op_modifies=True,
)
- self.have["device_id"] = response.get("id")
+ self.log("Response from 'update_device' API for errored state resolution: {0}".format(str(reset_response)), "DEBUG")
+ self.result['msg'] = "Device reset done Successfully"
+ self.log(self.result['msg'], "INFO")
+ self.result['response'] = reset_response
+ self.result['diff'] = self.validated_config
+ self.result['changed'] = True
+
+ return self
- if self.log:
- log(str(response))
- log(self.have.get("device_id"))
+ if not (
+ prov_dev_response.get("response") == 0 and
+ plan_dev_response.get("response") == 0 and
+ pnp_state == "Unclaimed"
+ ):
+ self.result['response'] = self.have.get("device_found")
+ self.result['msg'] = "Device is already claimed"
+ self.log(self.result['msg'], "WARNING")
+ if update_response.get("deviceInfo"):
+ self.result['changed'] = True
+ return self
claim_params = self.get_claim_params()
- claim_response = self.dnac._exec(
+ self.log("Parameters for claiming the device: {0}".format(str(claim_params)), "DEBUG")
+
+ claim_response = self.dnac_apply['exec'](
family="device_onboarding_pnp",
function='claim_a_device_to_a_site',
op_modifies=True,
params=claim_params,
)
-
- if self.log:
- log(str(claim_response))
-
+ self.log("Response from 'claim_a_device_to_a_site' API for claiming: {0}".format(str(claim_response)), "DEBUG")
if claim_response.get("response") == "Device Claimed":
- self.result['changed'] = True
- self.result['msg'] = "Device Claimed Successfully"
+ self.result['msg'] = "Only Device Claimed Successfully"
+ self.log(self.result['msg'], "INFO")
self.result['response'] = claim_response
- self.result['diff'] = self.validated
- else:
- self.module.fail_json(msg="Device Claim Failed", response=claim_response)
+ self.result['diff'] = self.validated_config
+ self.result['changed'] = True
- def get_diff_delete(self):
- if self.have.get("device_found"):
+ return self
+
+ def get_diff_deleted(self):
+ """
+ If the given device is added to pnp database
+ and is in unclaimed or failed state delete the
+ given device
+ Args:
+ self: An instance of a class used for interacting with Cisco Catalyst Center.
+ Here we pass a list of device info to be deleted
+ Returns:
+ self: An instance of the class with updated results and status based on
+ the deletion operation. It tells us the number of devices deleted if any of the devices
+ get deleted
+ Description:
+ This function is responsible for removing devices from the Cisco Catalyst Center PnP GUI and
+ pass new changes if devices are already deleted.
+ """
+ devices_deleted = []
+ devices_to_delete = self.want.get("pnp_params")[:]
+ for device in devices_to_delete:
+ multi_device_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='get_device_list',
+ params={"serial_number": device["deviceInfo"]["serialNumber"]}
+ )
+ self.log("Response from 'get_device_list' API for claiming: {0}".format(str(multi_device_response)), "DEBUG")
+ if multi_device_response and len(multi_device_response) == 1:
+ device_id = multi_device_response[0].get("id")
- try:
- response = self.dnac._exec(
+ response = self.dnac_apply['exec'](
family="device_onboarding_pnp",
function="delete_device_by_id_from_pnp",
op_modifies=True,
- params={"id": self.have.get("device_id")},
+ params={"id": device_id},
)
-
- if self.log:
- log(str(response))
-
- if response.get("deviceInfo").get("state") == "Deleted":
- self.result['changed'] = True
- self.result['response'] = response
- self.result['diff'] = self.validated
- self.result['msg'] = "Device Deleted Successfully"
+ self.log("Device details for the deleted device with \
+ serial number '{0}': {1}".format(device["deviceInfo"]["serialNumber"], str(response)), "DEBUG")
+ if response.get("deviceInfo", {}).get("state") == "Deleted":
+ devices_deleted.append(device["deviceInfo"]["serialNumber"])
+ self.want.get("pnp_params").remove(device)
else:
self.result['response'] = response
self.result['msg'] = "Error while deleting the device"
+ self.log(self.result['msg'], "CRITICAL")
- except Exception as errorstr:
- response = str(errorstr)
- msg = "Device Deletion Failed"
- self.module.fail_json(msg=msg, response=response)
-
+ if len(devices_deleted) > 0:
+ self.result['changed'] = True
+ self.result['response'] = devices_deleted
+ self.result['diff'] = self.want.get("pnp_params")
+ self.result['msg'] = "{0} Device(s) Deleted Successfully".format(len(devices_deleted))
+ self.log(self.result['msg'], "INFO")
else:
- self.module.fail_json(msg="Device Not Found", response=[])
+ self.result['msg'] = "Device(s) Not Found"
+ self.log(self.result['msg'], "WARNING")
+ self.result['response'] = devices_deleted
+
+ return self
+
+ def verify_diff_merged(self, config):
+ """
+ Verify the merged status(Creation/Updation) of PnP configuration in Cisco Catalyst Center.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the merged status of a configuration in Cisco Catalyst Center by
+ retrieving the current state (have) and desired state (want) of the configuration,
+ logs the states, and validates whether the specified device(s) exists in the DNA
+ Center configuration's PnP Database.
+ """
+
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(config)), "INFO")
+ # Code to validate Cisco Catalyst Center config for merged state
+ for device in self.want.get("pnp_params"):
+ device_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='get_device_list',
+ params={"serial_number": device["deviceInfo"]["serialNumber"]}
+ )
+ if (device_response and (len(device_response) == 1)):
+ msg = (
+ "Requested Device with Serial No. {0} is "
+ "present in Cisco Catalyst Center and"
+ " addition verified.".format(device["deviceInfo"]["serialNumber"]))
+ self.log(msg, "INFO")
+
+ else:
+ msg = (
+ "Requested Device with Serial No. {0} is "
+ "not present in Cisco Catalyst Center"
+ "Center".format(device["deviceInfo"]["serialNumber"]))
+ self.log(msg, "WARNING")
+
+ self.status = "success"
+ return self
+
+ def verify_diff_deleted(self, config):
+ """
+ Verify the deletion status of PnP configuration in Cisco Catalyst Center.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the deletion status of a configuration in Cisco Catalyst Center.
+ It validates whether the specified device(s) exists in the Cisco Catalyst Center configuration's
+ PnP Database.
+ """
+
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(config)), "INFO")
+ # Code to validate Cisco Catalyst Center config for deleted state
+ for device in self.want.get("pnp_params"):
+ device_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='get_device_list',
+ params={"serial_number": device["deviceInfo"]["serialNumber"]}
+ )
+ if not (device_response and (len(device_response) == 1)):
+ msg = (
+ "Requested Device with Serial No. {0} is "
+ "not present in the Cisco DNA"
+ "Center.".format(device["deviceInfo"]["serialNumber"]))
+ self.log(msg, "INFO")
+
+ else:
+ msg = (
+ "Requested Device with Serial No. {0} is "
+ "present in Cisco Catalyst Center".format(device["deviceInfo"]["serialNumber"]))
+ self.log(msg, "WARNING")
+
+ self.status = "success"
+ return self
def main():
- """ main entry point for module execution
+ """
+ main entry point for module execution
"""
- element_spec = dict(
- dnac_host=dict(required=True, type='str'),
- dnac_port=dict(type='str', default='443'),
- dnac_username=dict(type='str', default='admin', aliases=["user"]),
- dnac_password=dict(type='str', no_log=True),
- dnac_verify=dict(type='bool', default='True'),
- dnac_version=dict(type="str", default="2.2.3.3"),
- dnac_debug=dict(type='bool', default=False),
- dnac_log=dict(type='bool', default=False),
- validate_response_schema=dict(type="bool", default=True),
- config=dict(required=True, type='list', elements='dict'),
- state=dict(
- default='merged',
- choices=['merged', 'deleted']
- )
- )
+ element_spec = {'dnac_host': {'required': True, 'type': 'str'},
+ 'dnac_port': {'type': 'str', 'default': '443'},
+ 'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
+ 'dnac_password': {'type': 'str', 'no_log': True},
+ 'dnac_verify': {'type': 'bool', 'default': 'True'},
+ 'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
+ 'dnac_debug': {'type': 'bool', 'default': False},
+ 'dnac_log': {'type': 'bool', 'default': False},
+ 'dnac_log_level': {'type': 'str', 'default': 'WARNING'},
+ "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
+ "dnac_log_append": {"type": 'bool', "default": True},
+ 'validate_response_schema': {'type': 'bool', 'default': True},
+ 'config_verify': {"type": 'bool', "default": False},
+ 'dnac_api_task_timeout': {'type': 'int', "default": 1200},
+ 'dnac_task_poll_interval': {'type': 'int', "default": 2},
+ 'config': {'required': True, 'type': 'list', 'elements': 'dict'},
+ 'state': {'default': 'merged', 'choices': ['merged', 'deleted']}
+ }
module = AnsibleModule(argument_spec=element_spec,
supports_check_mode=False)
- dnac_pnp = DnacPnp(module)
- dnac_pnp.validate_input()
- state = dnac_pnp.get_state()
-
- dnac_pnp.get_want()
- dnac_pnp.get_have()
-
- if state == "merged":
- dnac_pnp.get_diff_merge()
-
- elif state == "deleted":
- dnac_pnp.get_diff_delete()
-
- module.exit_json(**dnac_pnp.result)
+ ccc_pnp = PnP(module)
+
+ state = ccc_pnp.params.get("state")
+ if state not in ccc_pnp.supported_states:
+ ccc_pnp.status = "invalid"
+ ccc_pnp.msg = "State {0} is invalid".format(state)
+ ccc_pnp.check_return_status()
+
+ ccc_pnp.validate_input().check_return_status()
+ config_verify = ccc_pnp.params.get("config_verify")
+
+ for config in ccc_pnp.validated_config:
+ ccc_pnp.reset_values()
+ ccc_pnp.get_want(config).check_return_status()
+ ccc_pnp.get_have().check_return_status()
+ ccc_pnp.get_diff_state_apply[state]().check_return_status()
+ if config_verify:
+ ccc_pnp.verify_diff_state_apply[state](config).check_return_status()
+
+ module.exit_json(**ccc_pnp.result)
if __name__ == '__main__':
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow_manager.py
new file mode 100644
index 000000000..e1b334f71
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow_manager.py
@@ -0,0 +1,1301 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2024, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ("Abinash Mishra, Madhan Sankaranarayanan, Rishita Chowdhary")
+
+DOCUMENTATION = r"""
+---
+module: pnp_workflow_manager
+short_description: Resource module for Site and PnP related functions
+description:
+ - Manage operations add device, claim device and unclaim device of Onboarding
+ Configuration(PnP) resource
+ - API to add device to pnp inventory and claim it to a site.
+ - API to delete device from the pnp inventory.
+ - API to reset the device from errored state.
+version_added: 6.6.0
+extends_documentation_fragment:
+ - cisco.dnac.workflow_manager_params
+author: Abinash Mishra (@abimishr) Madhan Sankaranarayanan (@madhansansel)
+ Rishita Chowdhary (@rishitachowdhary)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after
+ applying the playbook config.
+ type: bool
+ default: false
+ state:
+ description: The state of Cisco Catalyst Center after module completion.
+ type: str
+ choices:
+ - merged
+ - deleted
+ default: merged
+ config:
+ description:
+ - List of details of device being managed.
+ type: list
+ elements: dict
+ required: true
+ suboptions:
+ device_info:
+ description:
+ - Provides the device-specific information required for adding devices
+ to the PnP database that are not already present.
+ - For adding a single device, the list should contain exactly one set
+ of device information. If a site name is also provided, the device
+ can be claimed immediately after being added.
+ - For bulk import, the list must contain information for more than one
+ device. Bulk import is intended solely for adding devices; claiming
+ must be performed with separate tasks or configurations.
+ type: list
+ required: true
+ elements: dict
+ suboptions:
+ hostname:
+ description:
+ - Defines the desired hostname for the PnP device after it has
+ been claimed.
+ - The hostname can only be assigned or changed during the claim
+ process, not during bulk or single device additions.
+ type: str
+ state:
+ description:
+ - Represents the onboarding state of the PnP device.
+ - Possible values are 'Unclaimed', 'Claimed', or 'Provisioned'.
+ type: str
+ pid:
+ description: Pnp Device's pid.
+ type: str
+ serial_number:
+ description: Pnp Device's serial_number.
+ type: str
+ is_sudi_required:
+          description: Sudi Authentication requirement's flag.
+ type: bool
+ site_name:
+ description: Name of the site for which device will be claimed.
+ type: str
+ project_name:
+ description: Name of the project under which the template is present
+ type: str
+ default: Onboarding Configuration
+ template_name:
+ description:
+ - Name of template to be configured on the device.
+ - Supported for EWLC from Cisco Catalyst Center release version
+ 2.3.7.x onwards.
+ type: str
+ template_params:
+ description:
+ - Parameter values for the parameterised templates.
+          - Each variable has a value that needs to be passed as key-value pair
+ in the dictionary. We can pass values as
+ variable_name:variable_value.
+ - Supported for EWLC from Cisco Catalyst Center release version
+ 2.3.7.x onwards.
+ type: dict
+ image_name:
+ description: Name of image to be configured on the device
+ type: str
+ golden_image:
+        description: Is the image to be configured tagged as golden image
+ type: bool
+ pnp_type:
+ description: Specifies the device type for the Plug and Play (PnP) device. -
+ Options include 'Default', 'CatalystWLC', 'AccessPoint', or
+ 'StackSwitch'. - 'Default' is applicable to switches and routers. -
+ 'CatalystWLC' should be selected for 9800 series wireless controllers.
+ - 'AccessPoint' is used when claiming an access point. - 'StackSwitch'
+ should be chosen for a group of switches that operate as a single
+ switch, typically used in the access layer.
+ type: str
+ choices:
+ - Default
+ - CatalystWLC
+ - AccessPoint
+ - StackSwitch
+ default: Default
+ static_ip:
+ description: Management IP address of the Wireless Controller
+ type: str
+ subnet_mask:
+ description: Subnet Mask of the Management IP address of the Wireless Controller
+ type: str
+ gateway:
+ description: Gateway IP address of the Wireless Controller for getting pinged
+ type: str
+ vlan_id:
+        description: Vlan Id allocated for claiming of Wireless Controller
+ type: str
+ ip_interface_name:
+ description: Specifies the interface name utilized for Plug and Play (PnP) by
+ the Wireless Controller. Ensure this interface is pre-configured on
+ the Controller prior to device claiming.
+ type: str
+ rf_profile:
+ description:
+          - Radio Frequency (RF) profile of the AP being claimed.
+ - RF Profiles allow you to tune groups of APs that share a common
+ coverage zone together.
+ - They selectively change how Radio Resource Management will operate
+ the APs within that coverage zone.
+ - HIGH RF profile allows you to use more power and allows to join AP
+ with the client in an easier fashion.
+ - TYPICAL RF profile is a blend of moderate power and moderate
+ visibility to the client.
+ - LOW RF profile allows you to consume lesser power and has least
+ visibility to the client.
+ type: str
+ choices:
+ - HIGH
+ - LOW
+ - TYPICAL
+requirements:
+ - dnacentersdk == 2.6.10
+ - python >= 3.5
+notes:
+ - SDK Method used are device_onboarding_pnp.DeviceOnboardingPnp.add_device,
+ device_onboarding_pnp.DeviceOnboardingPnp.get_device_list,
+ device_onboarding_pnp.DeviceOnboardingPnp.claim_a_device_to_a_site,
+ device_onboarding_pnp.DeviceOnboardingPnp.delete_device_by_id_from_pnp,
+ device_onboarding_pnp.DeviceOnboardingPnp.get_device_count,
+ device_onboarding_pnp.DeviceOnboardingPnp.get_device_by_id,
+ device_onboarding_pnp.DeviceOnboardingPnp.update_device,
+ sites.Sites.get_site,
+ software_image_management_swim.SoftwareImageManagementSwim.get_software_image_details,
+ configuration_templates.ConfigurationTemplates.gets_the_templates_available
+ - Paths used are post /dna/intent/api/v1/onboarding/pnp-device post
+ /dna/intent/api/v1/onboarding/pnp-device/site-claim post
+ /dna/intent/api/v1/onboarding/pnp-device/{id} get
+ /dna/intent/api/v1/onboarding/pnp-device/count get
+ /dna/intent/api/v1/onboarding/pnp-device put /onboarding/pnp-device/${id}
+ get /dna/intent/api/v1/site get /dna/intent/api/v1/image/importation get
+ /dna/intent/api/v1/template-programmer/template
+
+"""
+
+EXAMPLES = r"""
+- name: Import multiple switches in bulk only
+ cisco.dnac.pnp_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ state: merged
+ config_verify: True
+ config:
+ - device_info:
+ - serial_number: QD2425L8M7
+ state: Unclaimed
+ pid: c9300-24P
+ is_sudi_required: False
+ - serial_number: QTC2320E0H9
+ state: Unclaimed
+ pid: c9300-24P
+ hostname: Test-123
+ - serial_number: ETC2320E0HB
+ state: Unclaimed
+ pid: c9300-24P
+
+- name: Add a new EWLC and claim it
+ cisco.dnac.pnp_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ state: merged
+ config_verify: True
+ config:
+ - device_info:
+ - serial_number: FOX2639PAY7
+ hostname: New_WLC
+ state: Unclaimed
+ pid: C9800-CL-K9
+ site_name: Global/USA/San Francisco/BGL_18
+ template_name: Ansible_PNP_WLC
+ template_params:
+ hostname: IAC-EWLC-Claimed
+ project_name: Onboarding Configuration
+ image_name: C9800-40-universalk9_wlc.17.12.01.SPA.bin
+ golden_image: true
+ pnp_type: CatalystWLC
+ static_ip: 204.192.101.10
+ subnet_mask: 255.255.255.0
+ gateway: 204.192.101.1
+ vlan_id: 1101
+ ip_interface_name: TenGigabitEthernet0/0/0
+
+- name: Claim a pre-added switch, apply a template, and perform an image upgrade for a specific site
+ cisco.dnac.pnp_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ state: merged
+ config_verify: True
+ config:
+ - device_info:
+ - serial_number: FJC271924EQ
+ hostname: Switch
+ state: Unclaimed
+ pid: C9300-48UXM
+ site_name: Global/USA/San Francisco/BGL_18
+ template_name: "Ansible_PNP_Switch"
+ image_name: cat9k_iosxe_npe.17.03.07.SPA.bin
+ project_name: Onboarding Configuration
+ template_params:
+ hostname: SJC-Switch-1
+ interface: TwoGigabitEthernet1/0/2
+
+- name: Remove multiple devices from the PnP dashboard safely (ignores non-existent devices)
+ cisco.dnac.pnp_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ state: deleted
+ config_verify: True
+ config:
+ - device_info:
+ - serial_number: QD2425L8M7
+ - serial_number: FTC2320E0HA
+ - serial_number: FKC2310E0HB
+"""
+
+RETURN = r"""
+#Case_1: When the device is claimed successfully.
+response_1:
+ description: A dictionary with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response":
+ {
+ "response": String,
+ "version": String
+ },
+ "msg": String
+ }
+
+#Case_2: Given site/image/template/project not found or Device is not found for deletion
+response_2:
+ description: A list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: list
+ sample: >
+ {
+ "response": [],
+ "msg": String
+ }
+
+#Case_3: Error while deleting/claiming a device
+response_3:
+ description: A string with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": String,
+ "msg": String
+ }
+"""
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts,
+ get_dict_result
+)
+
+
+class PnP(DnacBase):
+ def __init__(self, module):
+ super().__init__(module)
+
+ def validate_input(self):
+ """
+ Validate the fields provided in the playbook. Checks the
+ configuration provided in the playbook against a predefined
+ specification to ensure it adheres to the expected structure
+ and data types.
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of the
+ 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call
+ 'validate_input' on it.If the validation succeeds, 'self.status'
+ will be 'success'and 'self.validated_config' will contain the
+ validated configuration. If it fails, 'self.status' will be
+ 'failed', and 'self.msg' will describe the validation issues.
+ """
+
+ if not self.config:
+ self.msg = "config not available in playbook for validation"
+ self.status = "success"
+ return self
+
+ pnp_spec = {
+ 'template_name': {'type': 'str', 'required': False},
+ 'template_params': {'type': 'dict', 'required': False},
+ 'project_name': {'type': 'str', 'required': False,
+ 'default': 'Onboarding Configuration'},
+ 'site_name': {'type': 'str', 'required': False},
+ 'image_name': {'type': 'str', 'required': False},
+ 'golden_image': {'type': 'bool', 'required': False},
+ 'device_info': {'type': 'list', 'required': True,
+ 'elements': 'dict'},
+ 'pnp_type': {'type': 'str', 'required': False, 'default': 'Default'},
+ "rf_profile": {'type': 'str', 'required': False},
+ "static_ip": {'type': 'str', 'required': False},
+ "subnet_mask": {'type': 'str', 'required': False},
+ "gateway": {'type': 'str', 'required': False},
+ "vlan_id": {'type': 'str', 'required': False},
+ "ip_interface_name": {'type': 'str', 'required': False},
+ "sensorProfile": {'type': 'str', 'required': False}
+ }
+
+ # Validate pnp params
+ valid_pnp, invalid_params = validate_list_of_dicts(
+ self.config, pnp_spec
+ )
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format(
+ "\n".join(invalid_params))
+ self.log(str(self.msg), "ERROR")
+ self.status = "failed"
+ return self
+ self.validated_config = valid_pnp
+ self.msg = "Successfully validated playbook config params: {0}".format(str(valid_pnp))
+ self.log(str(self.msg), "INFO")
+ self.status = "success"
+
+ return self
+
+ def get_site_details(self):
+ """
+        Check whether the site exists or not, along with site id
+
+ Parameters:
+ - self: The instance of the class containing the 'config'
+ attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+          - site_exists: A boolean value indicating the existence of the site.
+ - site_id: The Id of the site i.e. required to claim device to site.
+ Example:
+          Post creation of the validated input, this method gets the
+ site_id and checks whether the site exists or not
+ """
+
+ site_exists = False
+ site_id = None
+ response = None
+
+ try:
+ response = self.dnac_apply['exec'](
+ family="sites",
+ function='get_site',
+ params={"name": self.want.get("site_name")},
+ )
+ except Exception:
+ self.log("Exception occurred as site \
+ '{0}' was not found".format(self.want.get("site_name")), "CRITICAL")
+ self.module.fail_json(msg="Site not found", response=[])
+
+ if response:
+ self.log("Received site details \
+ for '{0}': {1}".format(self.want.get("site_name"), str(response)), "DEBUG")
+ site = response.get("response")
+ if len(site) == 1:
+ site_id = site[0].get("id")
+ site_exists = True
+ self.log("Site Name: {1}, Site ID: {0}".format(site_id, self.want.get("site_name")), "INFO")
+
+ return (site_exists, site_id)
+
+ def get_site_type(self):
+ """
+ Fetches the type of site
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - site_type: A string indicating the type of the
+ site (area/building/floor).
+ Example:
+          Post creation of the validated input, this method gets the
+ type of the site.
+ """
+
+ try:
+ response = self.dnac_apply['exec'](
+ family="sites",
+ function='get_site',
+ params={"name": self.want.get("site_name")},
+ )
+ except Exception:
+ self.log("Exception occurred as \
+ site '{0}' was not found".format(self.want.get("site_name")), "CRITICAL")
+ self.module.fail_json(msg="Site not found", response=[])
+
+ if response:
+ self.log("Received site details\
+ for '{0}': {1}".format(self.want.get("site_name"), str(response)), "DEBUG")
+ site = response.get("response")
+ site_additional_info = site[0].get("additionalInfo")
+ for item in site_additional_info:
+ if item["nameSpace"] == "Location":
+ site_type = item.get("attributes").get("type")
+ self.log("Site type for site name '{1}' : {0}".format(site_type, self.want.get("site_name")), "INFO")
+
+ return site_type
+
+ def get_pnp_params(self, params):
+ """
+ Store pnp parameters from the playbook for pnp processing in Cisco Catalyst Center.
+
+ Parameters:
+ - self: The instance of the class containing the 'config'
+ attribute to be validated.
+ - params: The validated params passed from the playbook.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - pnp_params: A dictionary containing all the values indicating
+ the type of the site (area/building/floor).
+ Example:
+        Post creation of the validated input, it fetches the required parameters
+ and stores it for further processing and calling the parameters in
+ other APIs.
+ """
+
+ params_list = params["device_info"]
+ device_info_list = []
+ for param in params_list:
+ device_dict = {}
+ param["serialNumber"] = param.pop("serial_number")
+ if "is_sudi_required" in param:
+ param["isSudiRequired"] = param.pop("is_sudi_required")
+ device_dict["deviceInfo"] = param
+ device_info_list.append(device_dict)
+
+ self.log("PnP paramters passed are {0}".format(str(params_list)), "INFO")
+ return device_info_list
+
+ def get_image_params(self, params):
+ """
+ Get image name and the confirmation whether it's tagged golden or not
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ - params: The validated params passed from the playbook.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - image_params: A dictionary containing all the values indicating
+ name of the image and its golden image status.
+ Example:
+ Post creation of the validated input, it fetches the required
+ paramters and stores it for further processing and calling the
+ parameters in other APIs.
+ """
+
+ image_params = {
+ 'image_name': params.get('image_name'),
+ 'is_tagged_golden': params.get('golden_image')
+ }
+
+ self.log("Image details are {0}".format(str(image_params)), "INFO")
+ return image_params
+
+ def pnp_cred_failure(self, msg=None):
+ """
+ Method for failing discovery if there is any discrepancy in the PnP credentials
+ passed by the user
+ """
+
+ self.log(msg, "CRITICAL")
+ self.module.fail_json(msg=msg)
+
+ def get_claim_params(self):
+ """
+        Get the parameters needed for claiming the device to site.
+ Parameters:
+ - self: The instance of the class containing the 'config'
+ attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - claim_params: A dictionary needed for calling the POST call
+ for claim a device to a site API.
+ Example:
+ The stored dictionary can be used to call the API claim a device
+ to a site via SDK
+ """
+
+ imageinfo = {
+ 'imageId': self.have.get('image_id')
+ }
+ template_params = self.validated_config[0].get("template_params")
+ configinfo = {
+ 'configId': self.have.get('template_id'),
+ 'configParameters': [
+ {
+ 'key': '',
+ 'value': ''
+ }
+ ]
+ }
+
+ if configinfo.get("configId") and template_params:
+ if isinstance(template_params, dict):
+ if len(template_params) > 0:
+ configinfo["configParameters"] = []
+ for key, value in template_params.items():
+ config_dict = {
+ 'key': key,
+ 'value': value
+ }
+ configinfo["configParameters"].append(config_dict)
+
+ claim_params = {
+ 'deviceId': self.have.get('device_id'),
+ 'siteId': self.have.get('site_id'),
+ 'type': self.want.get('pnp_type'),
+ 'hostname': self.want.get('hostname'),
+ 'imageInfo': imageinfo,
+ 'configInfo': configinfo,
+ }
+
+ if claim_params["type"] == "CatalystWLC":
+ if not (self.validated_config[0].get('static_ip')):
+ msg = "A static IP address is required to claim a wireless controller. Please provide one."
+ self.pnp_cred_failure(msg=msg)
+ if not (self.validated_config[0].get('subnet_mask')):
+ msg = "Please provide a subnet mask to claim a wireless controller. "\
+ "This information is mandatory for the configuration."
+ self.pnp_cred_failure(msg=msg)
+ if not (self.validated_config[0].get('gateway')):
+ msg = "A gateway IP is required to claim a wireless controller. Please ensure to provide it."
+ self.pnp_cred_failure(msg=msg)
+ if not (self.validated_config[0].get('ip_interface_name')):
+ msg = "Please provide the Interface Name to claim a wireless controller. This information is necessary"\
+ " for making it a logical interface post claiming which can used to help manage the Wireless SSIDs "\
+ "broadcasted by the access points, manage the controller, access point and user data, plus more."
+ self.pnp_cred_failure(msg=msg)
+ if not (self.validated_config[0].get('vlan_id')):
+ msg = "Please provide the Vlan ID to claim a wireless controller. This is a required field for the process"\
+ " to create and set the specified port as trunk during PnP."
+ self.pnp_cred_failure(msg=msg)
+ claim_params["staticIP"] = self.validated_config[0]['static_ip']
+ claim_params["subnetMask"] = self.validated_config[0]['subnet_mask']
+ claim_params["gateway"] = self.validated_config[0]['gateway']
+ claim_params["vlanId"] = str(self.validated_config[0].get('vlan_id'))
+ claim_params["ipInterfaceName"] = self.validated_config[0]['ip_interface_name']
+
+ if claim_params["type"] == "AccessPoint":
+ if not (self.validated_config[0].get("rf_profile")):
+ msg = "The RF Profile for claiming an AP must be passed"
+ self.pnp_cred_failure(msg=msg)
+ claim_params["rfProfile"] = self.validated_config[0]["rf_profile"]
+
+ self.log("Paramters used for claiming are {0}".format(str(claim_params)), "INFO")
+ return claim_params
+
+ def get_reset_params(self):
+ """
+        Get the parameters needed for resetting the device in an errored state.
+ Parameters:
+ - self: The instance of the class containing the 'config'
+ attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - reset_params: A dictionary needed for calling the PUT call
+ for update device details API.
+ Example:
+ The stored dictionary can be used to call the API update device details
+ """
+
+ reset_params = {
+ "deviceResetList": [
+ {
+ "configList": [
+ {
+ "configId": self.have.get('template_id'),
+ "configParameters": [
+ {
+ "key": "",
+ "value": ""
+ }
+ ]
+ }
+ ],
+ "deviceId": self.have.get('device_id'),
+ "licenseLevel": "",
+ "licenseType": "",
+ "topOfStackSerialNumber": ""
+ }
+ ]
+ }
+
+ self.log("Paramters used for resetting from errored state:{0}".format(str(reset_params)), "INFO")
+ return reset_params
+
+ def get_have(self):
+ """
+ Get the current image, template and site details from the Cisco Catalyst Center.
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.image_response: A list of image passed by the user
+ - self.template_list: A list of template under project
+ - self.device_response: Gets the device_id and stores it
+ Example:
+        Stored parameters are used to call the APIs to get the current image,
+ template and site details to call the API for various types of devices
+ """
+ have = {}
+
+ # Claiming is only allowed for single addition of devices
+ if len(self.want.get('pnp_params')) == 1:
+ # check if given device exists in pnp inventory, store device Id
+ device_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='get_device_list',
+ params={"serial_number": self.want.get("serial_number")}
+ )
+ self.log("Device details for the device with serial \
+ number '{0}': {1}".format(self.want.get("serial_number"), str(device_response)), "DEBUG")
+
+ if not (device_response and (len(device_response) == 1)):
+ self.log("Device with serial number {0} is not found in the inventory".format(self.want.get("serial_number")), "WARNING")
+ self.msg = "Adding the device to database"
+ self.status = "success"
+ self.have = have
+ have["device_found"] = False
+ return self
+
+ have["device_found"] = True
+ have["device_id"] = device_response[0].get("id")
+ self.log("Device Id: " + str(have["device_id"]))
+
+ if self.params.get("state") == "merged":
+ # check if given image exists, if exists store image_id
+ image_response = self.dnac_apply['exec'](
+ family="software_image_management_swim",
+ function='get_software_image_details',
+ params=self.want.get("image_params"),
+ )
+ image_list = image_response.get("response")
+ self.log("Image details obtained from the API 'get_software_image_details': {0}".format(str(image_response)), "DEBUG")
+
+ # check if project has templates or not
+ template_list = self.dnac_apply['exec'](
+ family="configuration_templates",
+ function='gets_the_templates_available',
+ params={"project_names": self.want.get("project_name")},
+ )
+ self.log("List of templates under the project '{0}': {1}".format(self.want.get("project_name"), str(template_list)), "DEBUG")
+
+ dev_details_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function="get_device_by_id",
+ params={"id": device_response[0].get("id")}
+ )
+ self.log("Device details retrieved after calling the 'get_device_by_id' API: {0}".format(str(dev_details_response)), "DEBUG")
+ install_mode = dev_details_response.get("deviceInfo").get("mode")
+ self.log("Installation mode of the device with the serial no. '{0}':{1}".format(self.want.get("serial_number"), install_mode), "INFO")
+
+            # check if given site exists, if exists store current site info
+ site_exists = False
+ if not isinstance(self.want.get("site_name"), str) and \
+ not self.want.get('pnp_params')[0].get('deviceInfo'):
+ self.msg = "The site name must be a string"
+ self.log(str(self.msg), "ERROR")
+ self.status = "failed"
+ return self
+
+ site_name = self.want.get("site_name")
+ (site_exists, site_id) = self.get_site_details()
+
+ if site_exists:
+ have["site_id"] = site_id
+ self.log("Site Exists: {0}\nSite Name: {1}\nSite ID: {2}".format(site_exists, site_name, site_id), "INFO")
+ if self.want.get("pnp_type") == "AccessPoint":
+ if self.get_site_type() != "floor":
+ self.msg = "Please ensure that the site type is specified as 'floor' when claiming an AP."\
+ " The site type is given as '{0}'. Please change the 'site_type' into 'floor' to "\
+ "proceed.".format(self.get_site_type())
+ self.log(str(self.msg), "ERROR")
+ self.status = "failed"
+ return self
+
+ if len(image_list) == 0:
+ self.msg = "The image '{0}' is either not present or not tagged as 'Golden' in the Cisco Catalyst Center."\
+ " Please verify its existence and its tag status.".format(self.validated_config[0].get("image_name"))
+ self.log(self.msg, "CRITICAL")
+ self.status = "failed"
+ return self
+
+ if len(image_list) == 1:
+ if install_mode != "INSTALL":
+ self.msg = "The system must be in INSTALL mode to upgrade the image. The current mode is '{0}'."\
+ " Please switch to INSTALL mode to proceed.".format(install_mode)
+ self.log(str(self.msg), "CRITICAL")
+ self.status = "failed"
+ return self
+
+ have["image_id"] = image_list[0].get("imageUuid")
+ self.log("Image ID for the image '{0}': {1}".format(self.want.get('image_params').get('image_name'), str(have["image_id"])), "INFO")
+
+ template_name = self.want.get("template_name")
+ if template_name:
+ if not (template_list and isinstance(template_list, list)):
+ self.msg = "Either project not found"\
+ " or it is Empty."
+ self.log(self.msg, "CRITICAL")
+ self.status = "failed"
+ return self
+
+ template_details = get_dict_result(template_list, 'name', template_name)
+ if template_details:
+ have["template_id"] = template_details.get("templateId")
+ else:
+ self.msg = "Template '{0}' is not found.".format(template_name)
+ self.log(self.msg, "CRITICAL")
+ self.status = "failed"
+ return self
+
+ else:
+ if not self.want.get('pnp_params')[0].get('deviceInfo'):
+ self.msg = "Either Site Name or Device details must be added."
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ self.msg = "Successfully collected all project and template \
+ parameters from Cisco Catalyst Center for comparison"
+ self.log(self.msg, "INFO")
+ self.status = "success"
+ self.have = have
+ return self
+
+ def get_want(self, config):
+ """
+ Get all the image, template and site and pnp related
+ information from playbook that is needed to be created in Cisco Catalyst Center.
+
+ Parameters:
+ - self: The instance of the class containing the 'config'
+ attribute to be validated.
+ - config: validated config passed from the playbook
+ Returns:
+ The method returns an instance of the class with updated attributes:
+          - self.want: A dictionary of parameters obtained from the playbook.
+          - self.msg: A message indicating all the parameters from the playbook
+ are collected.
+ - self.status: Success.
+ Example:
+ It stores all the paramters passed from the playbook for further
+ processing before calling the APIs
+ """
+
+ self.want = {
+ 'image_params': self.get_image_params(config),
+ 'pnp_params': self.get_pnp_params(config),
+ 'pnp_type': config.get('pnp_type'),
+ 'site_name': config.get('site_name'),
+ 'project_name': config.get('project_name'),
+ 'template_name': config.get('template_name')
+ }
+ if len(self.want.get('pnp_params')) == 1:
+ self.want["serial_number"] = (
+ self.want['pnp_params'][0]["deviceInfo"].
+ get("serialNumber")
+ )
+ self.want["hostname"] = (
+ self.want['pnp_params'][0]["deviceInfo"].
+ get("hostname")
+ )
+
+ if self.want["pnp_type"] == "CatalystWLC":
+ self.want["static_ip"] = config.get('static_ip')
+ self.want["subnet_mask"] = config.get('subnet_mask')
+ self.want["gateway"] = config.get('gateway')
+ self.want["vlan_id"] = config.get('vlan_id')
+ self.want["ip_interface_name"] = config.get('ip_interface_name')
+
+ elif self.want["pnp_type"] == "AccessPoint":
+ self.want["rf_profile"] = config.get("rf_profile")
+ self.msg = "Successfully collected all parameters from playbook " + \
+ "for comparison"
+ self.log(self.msg, "INFO")
+ self.status = "success"
+
+ return self
+
+ def get_diff_merged(self):
+ """
+        If given device does not exist
+ then add it to pnp database and get the device id
+ Args:
+ self: An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ object: An instance of the class with updated results and status
+ based on the processing of differences. Based on the length of devices passed
+ it adds/claims or does both.
+ Description:
+ The function processes the differences and, depending on the
+ changes required, it may add, update,or resynchronize devices in
+ Cisco Catalyst Center. The updated results and status are stored in the
+ class instance for further use.
+ """
+
+ if not isinstance(self.want.get("pnp_params"), list):
+ self.msg = "Device Info must be passed as a list"
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ if len(self.want.get("pnp_params")) > 1:
+ devices_added = []
+ for device in self.want.get("pnp_params"):
+ multi_device_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='get_device_list',
+ params={"serial_number": device["deviceInfo"]["serialNumber"]}
+ )
+ self.log("Device details for serial number {0} \
+ obtained from the API 'get_device_list': {1}".format(device["deviceInfo"]["serialNumber"], str(multi_device_response)), "DEBUG")
+ if (multi_device_response and (len(multi_device_response) == 1)):
+ devices_added.append(device)
+ self.log("Details of the added device:{0}".format(str(device)), "INFO")
+ if (len(self.want.get("pnp_params")) - len(devices_added)) == 0:
+ self.result['response'] = []
+ self.result['msg'] = "Devices are already added"
+ self.log(self.result['msg'], "WARNING")
+ return self
+
+ bulk_list = [
+ device
+ for device in self.want.get("pnp_params")
+ if device not in devices_added
+ ]
+ bulk_params = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function="import_devices_in_bulk",
+ params={"payload": bulk_list},
+ op_modifies=True,
+ )
+ self.log("Response from API 'import_devices_in_bulk' for imported devices: {0}".format(bulk_params), "DEBUG")
+ if len(bulk_params.get("successList")) > 0:
+ self.result['msg'] = "{0} device(s) imported successfully".format(
+ len(bulk_params.get("successList")))
+ self.log(self.result['msg'], "INFO")
+ self.result['response'] = bulk_params
+ self.result['diff'] = self.validated_config
+ self.result['changed'] = True
+ return self
+
+ self.msg = "Bulk import failed"
+ self.log(self.msg, "CRITICAL")
+ self.status = "failed"
+ return self
+
+ provisioned_count_params = {
+ "serial_number": self.want.get("serial_number"),
+ "state": "Provisioned"
+ }
+
+ planned_count_params = {
+ "serial_number": self.want.get("serial_number"),
+ "state": "Planned"
+ }
+
+ if not self.have.get("device_found"):
+ if not self.want['pnp_params']:
+ self.msg = "Device needs to be added before claiming. Please add device_info"
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ if not self.want["site_name"]:
+ self.log("Adding device to pnp database", "INFO")
+ dev_add_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function="add_device",
+ params=self.want.get('pnp_params')[0],
+ op_modifies=True,
+ )
+
+ self.have["deviceInfo"] = dev_add_response.get("deviceInfo")
+ self.log("Response from API 'add device' for a single device addition: {0}".format(str(dev_add_response)), "DEBUG")
+ if self.have["deviceInfo"]:
+ self.result['msg'] = "Only Device Added Successfully"
+ self.log(self.result['msg'], "INFO")
+ self.result['response'] = dev_add_response
+ self.result['diff'] = self.validated_config
+ self.result['changed'] = True
+
+ else:
+ self.msg = "Device Addition Failed"
+ self.log(self.result['msg'], "CRITICAL")
+ self.status = "failed"
+
+ return self
+
+ else:
+ self.log("Adding device to pnp database")
+ dev_add_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function="add_device",
+ params=self.want.get("pnp_params")[0],
+ op_modifies=True,
+ )
+ self.get_have().check_return_status()
+ self.have["deviceInfo"] = dev_add_response.get("deviceInfo")
+ self.log("Response from API 'add device' for single device addition: {0}".format(str(dev_add_response)), "DEBUG")
+ claim_params = self.get_claim_params()
+ claim_params["deviceId"] = dev_add_response.get("id")
+ claim_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='claim_a_device_to_a_site',
+ op_modifies=True,
+ params=claim_params,
+ )
+
+ self.log("Response from API 'claim a device to a site' for a single claiming: {0}".format(str(dev_add_response)), "DEBUG")
+ if claim_response.get("response") == "Device Claimed" and self.have["deviceInfo"]:
+ self.result['msg'] = "Device Added and Claimed Successfully"
+ self.log(self.result['msg'], "INFO")
+ self.result['response'] = claim_response
+ self.result['diff'] = self.validated_config
+ self.result['changed'] = True
+
+ else:
+ self.msg = "Device Claim Failed"
+ self.log(self.result['msg'], "CRITICAL")
+ self.status = "failed"
+
+ return self
+
+ prov_dev_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='get_device_count',
+ op_modifies=True,
+ params=provisioned_count_params,
+ )
+ self.log("Response from 'get device count' API for provisioned devices: {0}".format(str(prov_dev_response)), "DEBUG")
+
+ plan_dev_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='get_device_count',
+ op_modifies=True,
+ params=planned_count_params,
+ )
+ self.log("Response from 'get_device_count' API for devices in planned state: {0}".format(str(plan_dev_response)), "DEBUG")
+
+ dev_details_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function="get_device_by_id",
+ params={"id": self.have["device_id"]}
+ )
+ self.log("Response from 'get_device_by_id' API for device details: {0}".format(str(dev_details_response)), "DEBUG")
+
+ is_stack = False
+ if dev_details_response.get("deviceInfo").get("stack"):
+ is_stack = dev_details_response.get("deviceInfo").get("stack")
+ pnp_state = dev_details_response.get("deviceInfo").get("state")
+ self.log("PnP state of the device: {0}".format(pnp_state), "INFO")
+
+ if not self.want["site_name"]:
+ self.result['response'] = self.have.get("device_found")
+ self.result['msg'] = "Device is already added"
+ self.log(self.result['msg'], "WARNING")
+ return self
+
+ update_payload = {"deviceInfo": self.want.get('pnp_params')[0].get("deviceInfo")}
+ update_payload["deviceInfo"]["stack"] = is_stack
+
+ self.log("The request sent for 'update_device' API for device's config update: {0}".format(update_payload), "DEBUG")
+ update_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function="update_device",
+ params={"id": self.have["device_id"],
+ "payload": update_payload},
+ op_modifies=True,
+ )
+ self.log("Response from 'update_device' API for device's config update: {0}".format(str(update_response)), "DEBUG")
+
+ if pnp_state == "Error":
+ reset_paramters = self.get_reset_params()
+ reset_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function="reset_device",
+ params={"payload": reset_paramters},
+ op_modifies=True,
+ )
+ self.log("Response from 'update_device' API for errored state resolution: {0}".format(str(reset_response)), "DEBUG")
+ self.result['msg'] = "Device reset done Successfully"
+ self.log(self.result['msg'], "INFO")
+ self.result['response'] = reset_response
+ self.result['diff'] = self.validated_config
+ self.result['changed'] = True
+
+ return self
+
+ if not (
+ prov_dev_response.get("response") == 0 and
+ plan_dev_response.get("response") == 0 and
+ pnp_state == "Unclaimed"
+ ):
+ self.result['response'] = self.have.get("device_found")
+ self.result['msg'] = "Device is already claimed"
+ self.log(self.result['msg'], "WARNING")
+ if update_response.get("deviceInfo"):
+ self.result['changed'] = True
+ return self
+
+ claim_params = self.get_claim_params()
+ self.log("Parameters for claiming the device: {0}".format(str(claim_params)), "DEBUG")
+
+ claim_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='claim_a_device_to_a_site',
+ op_modifies=True,
+ params=claim_params,
+ )
+ self.log("Response from 'claim_a_device_to_a_site' API for claiming: {0}".format(str(claim_response)), "DEBUG")
+ if claim_response.get("response") == "Device Claimed":
+ self.result['msg'] = "Only Device Claimed Successfully"
+ self.log(self.result['msg'], "INFO")
+ self.result['response'] = claim_response
+ self.result['diff'] = self.validated_config
+ self.result['changed'] = True
+
+ return self
+
+ def get_diff_deleted(self):
+ """
+ If the given device is added to pnp database
+ and is in unclaimed or failed state delete the
+ given device
+ Args:
+ self: An instance of a class used for interacting with Cisco Catalyst Center.
+ Here we pass a list of device info to be deleted
+ Returns:
+ self: An instance of the class with updated results and status based on
+ the deletion operation. It tells us the number of devices deleted if any of the devices
+ get deleted
+ Description:
+ This function is responsible for removing devices from the Cisco Catalyst Center PnP GUI and
+ pass new changes if devices are already deleted.
+ """
+ devices_deleted = []
+ devices_to_delete = self.want.get("pnp_params")[:]
+ for device in devices_to_delete:
+ multi_device_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='get_device_list',
+ params={"serial_number": device["deviceInfo"]["serialNumber"]}
+ )
+ self.log("Response from 'get_device_list' API for claiming: {0}".format(str(multi_device_response)), "DEBUG")
+ if multi_device_response and len(multi_device_response) == 1:
+ device_id = multi_device_response[0].get("id")
+
+ response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function="delete_device_by_id_from_pnp",
+ op_modifies=True,
+ params={"id": device_id},
+ )
+ self.log("Device details for the deleted device with \
+ serial number '{0}': {1}".format(device["deviceInfo"]["serialNumber"], str(response)), "DEBUG")
+ if response.get("deviceInfo", {}).get("state") == "Deleted":
+ devices_deleted.append(device["deviceInfo"]["serialNumber"])
+ self.want.get("pnp_params").remove(device)
+ else:
+ self.result['response'] = response
+ self.result['msg'] = "Error while deleting the device"
+ self.log(self.result['msg'], "CRITICAL")
+
+ if len(devices_deleted) > 0:
+ self.result['changed'] = True
+ self.result['response'] = devices_deleted
+ self.result['diff'] = self.want.get("pnp_params")
+ self.result['msg'] = "{0} Device(s) Deleted Successfully".format(len(devices_deleted))
+ self.log(self.result['msg'], "INFO")
+ else:
+ self.result['msg'] = "Device(s) Not Found"
+ self.log(self.result['msg'], "WARNING")
+ self.result['response'] = devices_deleted
+
+ return self
+
+ def verify_diff_merged(self, config):
+ """
+ Verify the merged status(Creation/Updation) of PnP configuration in Cisco Catalyst Center.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the merged status of a configuration in Cisco Catalyst Center by
+ retrieving the current state (have) and desired state (want) of the configuration,
+ logs the states, and validates whether the specified device(s) exists in the DNA
+ Center configuration's PnP Database.
+ """
+
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(config)), "INFO")
+ # Code to validate Cisco Catalyst Center config for merged state
+ for device in self.want.get("pnp_params"):
+ device_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='get_device_list',
+ params={"serial_number": device["deviceInfo"]["serialNumber"]}
+ )
+ if (device_response and (len(device_response) == 1)):
+ msg = (
+ "Requested Device with Serial No. {0} is "
+ "present in Cisco Catalyst Center and"
+ " addition verified.".format(device["deviceInfo"]["serialNumber"]))
+ self.log(msg, "INFO")
+
+ else:
+ msg = (
+ "Requested Device with Serial No. {0} is "
+ "not present in Cisco Catalyst Center"
+ "Center".format(device["deviceInfo"]["serialNumber"]))
+ self.log(msg, "WARNING")
+
+ self.status = "success"
+ return self
+
+ def verify_diff_deleted(self, config):
+ """
+ Verify the deletion status of PnP configuration in Cisco Catalyst Center.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the deletion status of a configuration in Cisco Catalyst Center.
+ It validates whether the specified device(s) exists in the Cisco Catalyst Center configuration's
+ PnP Database.
+ """
+
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(config)), "INFO")
+ # Code to validate Cisco Catalyst Center config for deleted state
+ for device in self.want.get("pnp_params"):
+ device_response = self.dnac_apply['exec'](
+ family="device_onboarding_pnp",
+ function='get_device_list',
+ params={"serial_number": device["deviceInfo"]["serialNumber"]}
+ )
+ if not (device_response and (len(device_response) == 1)):
+ msg = (
+ "Requested Device with Serial No. {0} is "
+ "not present in the Cisco DNA"
+ "Center.".format(device["deviceInfo"]["serialNumber"]))
+ self.log(msg, "INFO")
+
+ else:
+ msg = (
+ "Requested Device with Serial No. {0} is "
+ "present in Cisco Catalyst Center".format(device["deviceInfo"]["serialNumber"]))
+ self.log(msg, "WARNING")
+
+ self.status = "success"
+ return self
+
+
def main():
    """
    Main entry point for module execution.

    Builds the Ansible argument spec shared by Catalyst Center workflow
    modules, validates the playbook input, then applies (and optionally
    verifies) the merged/deleted state for each validated config entry.
    """

    element_spec = {'dnac_host': {'required': True, 'type': 'str'},
                    'dnac_port': {'type': 'str', 'default': '443'},
                    'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
                    'dnac_password': {'type': 'str', 'no_log': True},
                    # Fix: default was the string 'True' for a bool option;
                    # use a real boolean so the default matches its type.
                    'dnac_verify': {'type': 'bool', 'default': True},
                    'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
                    'dnac_debug': {'type': 'bool', 'default': False},
                    'dnac_log': {'type': 'bool', 'default': False},
                    'dnac_log_level': {'type': 'str', 'default': 'WARNING'},
                    "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
                    "dnac_log_append": {"type": 'bool', "default": True},
                    'validate_response_schema': {'type': 'bool', 'default': True},
                    'config_verify': {"type": 'bool', "default": False},
                    'dnac_api_task_timeout': {'type': 'int', "default": 1200},
                    'dnac_task_poll_interval': {'type': 'int', "default": 2},
                    'config': {'required': True, 'type': 'list', 'elements': 'dict'},
                    'state': {'default': 'merged', 'choices': ['merged', 'deleted']}
                    }

    module = AnsibleModule(argument_spec=element_spec,
                           supports_check_mode=False)
    ccc_pnp = PnP(module)

    # Reject unsupported states up front with a clear error.
    state = ccc_pnp.params.get("state")
    if state not in ccc_pnp.supported_states:
        ccc_pnp.status = "invalid"
        ccc_pnp.msg = "State {0} is invalid".format(state)
        ccc_pnp.check_return_status()

    ccc_pnp.validate_input().check_return_status()
    config_verify = ccc_pnp.params.get("config_verify")

    # Each config entry is processed independently: gather desired (want)
    # and current (have) state, then apply the state-specific diff handler.
    for config in ccc_pnp.validated_config:
        ccc_pnp.reset_values()
        ccc_pnp.get_want(config).check_return_status()
        ccc_pnp.get_have().check_return_status()
        ccc_pnp.get_diff_state_apply[state]().check_return_status()
        if config_verify:
            ccc_pnp.verify_diff_state_apply[state](config).check_return_status()

    module.exit_json(**ccc_pnp.result)
diff --git a/ansible_collections/cisco/dnac/plugins/modules/provision_intent.py b/ansible_collections/cisco/dnac/plugins/modules/provision_intent.py
new file mode 100644
index 000000000..4a3c8a228
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/provision_intent.py
@@ -0,0 +1,620 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ("Abinash Mishra")
+
+DOCUMENTATION = r"""
+---
+module: provision_intent
+short_description: Resource module for provision functions
+description:
+- Manage operation related to wired and wireless provisioning
+- API to re-provision provisioned devices
+- API to un-provision provisioned devices
+version_added: '6.6.0'
+extends_documentation_fragment:
+ - cisco.dnac.intent_params
+author: Abinash Mishra (@abimishr)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of DNAC after module completion.
+ type: str
+ choices: [ merged, deleted ]
+ default: merged
+ config:
+ description:
+ - List of details of device being managed.
+ type: list
+ elements: dict
+ required: true
+ suboptions:
+ management_ip_address:
+ description: Management Ip Address .
+ type: str
+ required: true
+ site_name:
+ description: Name of site where the device needs to be added.
+ type: str
+ managed_ap_locations:
+ description: Location of the sites allocated for the APs
+ type: list
+ elements: str
+ dynamic_interfaces:
+ description: Interface details of the controller
+ type: list
+ elements: dict
+ suboptions:
+ interface_ip_address:
+ description: Ip Address allocated to the interface
+ type: str
+ interface_netmask_in_c_i_d_r:
+ description: Netmask of the interface in CIDR notation
+ type: int
+ interface_gateway:
+ description: Gateway IP address of the interface
+ type: str
+ lag_or_port_number:
+ description: LAG or port number of the interface
+ type: int
+ vlan_id:
+ description: VLAN id allocated to the interface
+ type: int
+ interface_name:
+ description: Name of the interface
+ type: str
+
+requirements:
+- dnacentersdk == 2.4.5
+- python >= 3.5
+notes:
+ - SDK Methods used are
+ sites.Sites.get_site,
+ devices.Devices.get_network_device_by_ip,
+ task.Task.get_task_by_id,
+ sda.Sda.get_provisioned_wired_device,
+ sda.Sda.re_provision_wired_device,
+ sda.Sda.provision_wired_device,
+ wireless.Wireless.provision
+
+ - Paths used are
+ get /dna/intent/api/v1/site
+ get /dna/intent/api/v1/network-device/ip-address/{ipAddress}
+ get /dna/intent/api/v1/task/{taskId}
+ get /dna/intent/api/v1/business/sda/provision-device
+ put /dna/intent/api/v1/business/sda/provision-device
+ post /dna/intent/api/v1/business/sda/provision-device
+ post /dna/intent/api/v1/wireless/provision
+
+"""
+
+EXAMPLES = r"""
+- name: Create/Modify a new provision
+ cisco.dnac.provision_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ state: merged
+ config:
+ - site_name: string
+ management_ip_address: string
+ managed_ap_locations: list
+ dynamic_interfaces:
+ - vlan_id: integer
+ interface_name: string
+ interface_ip_address: string
+ interface_gateway: string
+ interface_netmask_in_c_i_d_r: integer
+ lag_or_port_number: integer
+
+"""
+
+RETURN = r"""
+# Case_1: Successful creation/updation/deletion of provision
+response_1:
+ description: A dictionary with details of provision is returned
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response":
+ {
+ "response": String,
+ "version": String
+ },
+ "msg": String
+ }
+
+# Case_2: Error while creating a provision
+response_2:
+ description: A list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: list
+ sample: >
+ {
+ "response": [],
+ "msg": String
+ }
+
+# Case_3: Already exists and requires no update
+response_3:
+ description: A dictionary with the existing details as returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": String,
+ "msg": String
+ }
+"""
+import time
+import re
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts
+)
+
+
class Dnacprovision(DnacBase):

    """
    Class containing member attributes for provision intent module
    """
    def __init__(self, module):
        # Delegate all common Catalyst Center setup (connection parameters,
        # logging, result skeleton) to the DnacBase constructor.
        super().__init__(module)
+
+ def validate_input(self):
+
+ """
+ Validate the fields provided in the playbook.
+ Checks the configuration provided in the playbook against a predefined specification
+ to ensure it adheres to the expected structure and data types.
+ Args:
+ self: The instance of the class containing the 'config' attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of the
+ 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call 'validate_input' on it.
+ If the validation succeeds, 'self.status' will be 'success' and
+ 'self.validated_config' will contain the validated configuration. If it fails,
+ 'self.status' will be 'failed', and 'self.msg' will describe the validation issues.
+ """
+
+ if not self.config:
+ self.msg = "config not available in playbook for validattion"
+ self.status = "success"
+ return self
+
+ provision_spec = {
+ "management_ip_address": {'type': 'str', 'required': True},
+ "site_name": {'type': 'str', 'required': True},
+ "managed_ap_locations": {'type': 'list', 'required': False,
+ 'elements': 'str'},
+ "dynamic_interfaces": {'type': 'list', 'required': False,
+ 'elements': 'dict'}
+ }
+ # Validate provision params
+ valid_provision, invalid_params = validate_list_of_dicts(
+ self.config, provision_spec
+ )
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format(
+ "\n".join(invalid_params))
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_provision
+ self.log(str(valid_provision))
+ self.msg = "Successfully validated input"
+ self.status = "success"
+ return self
+
+ def get_dev_type(self):
+ """
+ Fetches the type of device (wired/wireless)
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - device_type: A string indicating the type of the
+ device (wired/wireless).
+ Example:
+ Post creation of the validated input, we this method gets the
+ type of the device.
+ """
+
+ dev_response = self.dnac_apply['exec'](
+ family="devices",
+ function='get_network_device_by_ip',
+ params={"ip_address": self.validated_config[0]["management_ip_address"]}
+ )
+
+ dev_dict = dev_response.get("response")
+ device_family = dev_dict["family"]
+
+ if device_family == "Wireless Controller":
+ device_type = "wireless"
+ elif device_family in ["Switches and Hubs", "Routers"]:
+ device_type = "wired"
+ else:
+ device_type = None
+ return device_type
+
+ def get_task_status(self, task_id=None):
+ """
+ Fetches the status of the task once any provision API is called
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - result: A dict indiacting wheter the task was succesful or not
+ Example:
+ Post creation of the provision task, this method fetheches the task
+ status.
+
+ """
+ result = False
+ params = {"task_id": task_id}
+ while True:
+ response = self.dnac_apply['exec'](
+ family="task",
+ function='get_task_by_id',
+ params=params,
+ )
+ response = response.response
+ if response.get('isError') or re.search(
+ 'failed', response.get('progress'), flags=re.IGNORECASE
+ ):
+ msg = 'Discovery task with id {0} has not completed - Reason: {1}'.format(
+ task_id, response.get("failureReason"))
+ self.module.fail_json(msg=msg)
+ return False
+
+ if response.get('progress') != 'In Progress':
+ result = True
+ break
+
+ time.sleep(3)
+ self.result.update(dict(discovery_task=response))
+ return result
+
+ def get_site_type(self, site_name=None):
+ """
+ Fetches the type of site
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - site_type: A string indicating the type of the
+ site (area/building/floor).
+ Example:
+ Post creation of the validated input, we this method gets the
+ type of the site.
+ """
+
+ try:
+ response = self.dnac_apply['exec'](
+ family="sites",
+ function='get_site',
+ params={"name": site_name},
+ )
+ except Exception:
+ self.module.fail_json(msg="Site not found", response=[])
+
+ if response:
+ self.log(str(response))
+ site = response.get("response")
+ site_additional_info = site[0].get("additionalInfo")
+ for item in site_additional_info:
+ if item["nameSpace"] == "Location":
+ site_type = item.get("attributes").get("type")
+
+ return site_type
+
+ def get_wired_params(self):
+ """
+ Prepares the payload for provisioning of the wired devices
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - wired_params: A dictionary containing all the values indicating
+ management IP address of the device and the hierarchy
+ of the site.
+ Example:
+ Post creation of the validated input, it fetches the required
+ paramters and stores it for further processing and calling the
+ parameters in other APIs.
+ """
+
+ wired_params = {
+ "deviceManagementIpAddress": self.validated_config[0]["management_ip_address"],
+ "siteNameHierarchy": self.validated_config[0].get("site_name")
+ }
+
+ return wired_params
+
+ def get_wireless_params(self):
+ """
+ Prepares the payload for provisioning of the wireless devices
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - wireless_params: A list of dictionary containing all the values indicating
+ management IP address of the device, hierarchy
+ of the site, AP Location of the wireless controller and details
+ of the interface
+ Example:
+ Post creation of the validated input, it fetches the required
+ paramters and stores it for further processing and calling the
+ parameters in other APIs.
+ """
+
+ wireless_params = [
+ {
+ "site": self.validated_config[0].get("site_name"),
+ "managedAPLocations": self.validated_config[0].get("managed_ap_locations"),
+ }
+ ]
+ for ap_loc in wireless_params[0]["managedAPLocations"]:
+ if self.get_site_type(site_name=ap_loc) != "floor":
+ self.module.fail_json(msg="Managed AP Location must be a floor", response=[])
+
+ wireless_params[0]["dynamicInterfaces"] = []
+ for interface in self.validated_config[0].get("dynamic_interfaces"):
+ interface_dict = {
+ "interfaceIPAddress": interface.get("interface_ip_address"),
+ "interfaceNetmaskInCIDR": interface.get("interface_netmask_in_c_i_d_r"),
+ "interfaceGateway": interface.get("interface_gateway"),
+ "lagOrPortNumber": interface.get("lag_or_port_number"),
+ "vlanId": interface.get("vlan_id"),
+ "interfaceName": interface.get("interface_name")
+ }
+ wireless_params[0]["dynamicInterfaces"].append(interface_dict)
+ response = self.dnac_apply['exec'](
+ family="devices",
+ function='get_network_device_by_ip',
+ params={"management_ip_address": self.validated_config[0]["management_ip_address"]}
+ )
+
+ wireless_params[0]["deviceName"] = response.get("response")[0].get("hostname")
+ return wireless_params
+
+ def get_want(self):
+ """
+ Get all provision related informantion from the playbook
+ Args:
+ self: The instance of the class containing the 'config' attribute to be validated.
+ config: validated config passed from the playbook
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.want: A dictionary of paramters obtained from the playbook
+ - self.msg: A message indicating all the paramters from the playbook are
+ collected
+ - self.status: Success
+ Example:
+ It stores all the paramters passed from the playbook for further processing
+ before calling the APIs
+ """
+
+ self.want = {}
+ self.want["device_type"] = self.get_dev_type()
+ if self.want["device_type"] == "wired":
+ self.want["prov_params"] = self.get_wired_params()
+ elif self.want["device_type"] == "wireless":
+ self.want["prov_params"] = self.get_wireless_params()
+ else:
+ self.log("Passed devices are neither wired or wireless devices")
+
+ self.msg = "Successfully collected all parameters from playbook " + \
+ "for comparison"
+ self.status = "success"
+ return self
+
+ def get_diff_merged(self):
+ """
+ Add to provision database
+ Args:
+ self: An instance of a class used for interacting with Cisco DNA Center.
+ Returns:
+ object: An instance of the class with updated results and status
+ based on the processing of differences.
+ Description:
+ The function processes the differences and, depending on the
+ changes required, it may add, update,or resynchronize devices in
+ Cisco DNA Center. The updated results and status are stored in the
+ class instance for further use.
+ """
+
+ device_type = self.want.get("device_type")
+ if device_type == "wired":
+ try:
+ status_response = self.dnac_apply['exec'](
+ family="sda",
+ function="get_provisioned_wired_device",
+ op_modifies=True,
+ params={
+ "device_management_\
+ ip_address":
+ self.validated_config[0]["management_ip_address"]
+ },
+ )
+ except Exception:
+ status_response = {}
+
+ status = status_response.get("status")
+
+ if status == "success":
+ response = self.dnac_apply['exec'](
+ family="sda",
+ function="re_provision_wired_device",
+ op_modifies=True,
+ params=self.want["prov_params"],
+ )
+ else:
+ response = self.dnac_apply['exec'](
+ family="sda",
+ function="provision_wired_device",
+ op_modifies=True,
+ params=self.want["prov_params"],
+ )
+
+ elif device_type == "wireless":
+ response = self.dnac_apply['exec'](
+ family="wireless",
+ function="provision",
+ op_modifies=True,
+ params=self.want["prov_params"],
+ )
+
+ else:
+ self.result['msg'] = "Passed device is neither wired nor wireless"
+ self.result['response'] = self.want["prov_params"]
+ return self
+
+ task_id = response.get("taskId")
+ provision_info = self.get_task_status(task_id=task_id)
+ self.result["changed"] = True
+ self.result['msg'] = "Provision done Successfully"
+ self.result['diff'] = self.validated_config
+ self.result['response'] = task_id
+
+ return self
+
+ def get_diff_deleted(self):
+ """
+ Delete from provision database
+ Args:
+ self: An instance of a class used for interacting with Cisco DNA Center
+ Returns:
+ self: An instance of the class with updated results and status based on
+ the deletion operation.
+ Description:
+ This function is responsible for removing devices from the Cisco DNA Center PnP GUI and
+ raise Exception if any error occured.
+ """
+
+ device_type = self.want.get("device_type")
+
+ if device_type != "wired":
+ self.result['msg'] = "APIs are not supported for the device"
+ return self
+
+ try:
+ status_response = self.dnac_apply['exec'](
+ family="sda",
+ function="get_provisioned_wired_device",
+ op_modifies=True,
+ params={
+ "device_management_\
+ ip_address":
+ self.validated_config[0]["management_ip_address"]
+ },
+ )
+
+ except Exception:
+ status_response = {}
+
+ status = status_response.get("status")
+
+ if status != "success":
+ self.result['msg'] = "Passed IP address is not provisioned"
+ self.result['response'] = self.want["prov_params"]
+ return self
+
+ response = self.dnac_apply['exec'](
+ family="sda",
+ function="delete_provisioned_wired_device",
+ op_modifies=True,
+ params={
+ "device_management_\
+ ip_address":
+ self.validated_config[0]["management_ip_address"]
+ },
+ )
+
+ task_id = response.get("taskId")
+ deletion_info = self.get_task_status(task_id=task_id)
+ self.result["changed"] = True
+ self.result['msg'] = "Deletion done Successfully"
+ self.result['diff'] = self.validated_config
+ self.result['response'] = task_id
+
+ return self
+
+
def main():

    """
    Main entry point for module execution.

    Builds the Ansible argument spec, validates the playbook input, then
    applies the merged/deleted state for each validated config entry.
    """

    element_spec = {'dnac_host': {'required': True, 'type': 'str'},
                    'dnac_port': {'type': 'str', 'default': '443'},
                    'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
                    'dnac_password': {'type': 'str', 'no_log': True},
                    # Fix: default was the string 'True' for a bool option;
                    # use a real boolean so the default matches its type.
                    'dnac_verify': {'type': 'bool', 'default': True},
                    'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
                    'dnac_debug': {'type': 'bool', 'default': False},
                    'dnac_log': {'type': 'bool', 'default': False},
                    "dnac_log_level": {"type": 'str', "default": 'WARNING'},
                    "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
                    "dnac_log_append": {"type": 'bool', "default": True},
                    "config_verify": {"type": 'bool', "default": False},
                    'dnac_api_task_timeout': {'type': 'int', "default": 1200},
                    'dnac_task_poll_interval': {'type': 'int', "default": 2},
                    'validate_response_schema': {'type': 'bool', 'default': True},
                    'config': {'required': True, 'type': 'list', 'elements': 'dict'},
                    'state': {'default': 'merged', 'choices': ['merged', 'deleted']}
                    }
    module = AnsibleModule(argument_spec=element_spec,
                           supports_check_mode=False)
    dnac_provision = Dnacprovision(module)

    # Reject unsupported states up front with a clear error.
    state = dnac_provision.params.get("state")
    if state not in dnac_provision.supported_states:
        dnac_provision.status = "invalid"
        dnac_provision.msg = "State {0} is invalid".format(state)
        dnac_provision.check_return_status()

    dnac_provision.validate_input().check_return_status()

    # Each config entry is processed independently: gather desired state,
    # then apply the state-specific diff handler.
    for config in dnac_provision.validated_config:
        dnac_provision.reset_values()
        dnac_provision.get_want().check_return_status()
        dnac_provision.get_diff_state_apply[state]().check_return_status()

    module.exit_json(**dnac_provision.result)
diff --git a/ansible_collections/cisco/dnac/plugins/modules/provision_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/provision_workflow_manager.py
new file mode 100644
index 000000000..27ae58141
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/provision_workflow_manager.py
@@ -0,0 +1,737 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2024, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ("Abinash Mishra, Madhan Sankaranarayanan")
+
+DOCUMENTATION = r"""
+---
+module: provision_workflow_manager
+short_description: Resource module for provision related functions
+description:
+- Manage operations related to wired and wireless provisioning
+- API to re-provision provisioned devices
+- API to un-provision provisioned devices
+version_added: '6.6.0'
+extends_documentation_fragment:
+ - cisco.dnac.workflow_manager_params
+author: Abinash Mishra (@abimishr)
+ Madhan Sankaranarayanan (@madhansansel)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Cisco Catalyst Center after module completion.
+ type: str
+ choices: [ merged, deleted ]
+ default: merged
+ config:
+ description:
+ - List of details of device being managed.
+ type: list
+ elements: dict
+ required: true
+ suboptions:
+ management_ip_address:
+        description: Management IP address of the device to be provisioned.
+ type: str
+ required: true
+ site_name_hierarchy:
+ description: Name of site where the device needs to be added.
+ type: str
+ managed_ap_locations:
+ description: Location of the sites allocated for the APs
+ type: list
+ elements: str
+ dynamic_interfaces:
+ description: Interface details of the controller
+ type: list
+ elements: dict
+ suboptions:
+          interface_ip_address:
+            description: IP address allocated to the interface
+            type: str
+          interface_netmask_in_c_i_d_r:
+            description: Netmask of the interface in CIDR notation (prefix length)
+            type: int
+          interface_gateway:
+            description: Gateway IP address of the interface
+            type: str
+          lag_or_port_number:
+            description: LAG or port number of the interface
+            type: int
+          vlan_id:
+            description: VLAN ID assigned to the interface
+            type: int
+          interface_name:
+            description: Name of the interface
+            type: str
+
+requirements:
+- dnacentersdk == 2.4.5
+- python >= 3.5
+notes:
+ - SDK Methods used are
+ sites.Sites.get_site,
+ devices.Devices.get_network_device_by_ip,
+ task.Task.get_task_by_id,
+ sda.Sda.get_provisioned_wired_device,
+ sda.Sda.re_provision_wired_device,
+ sda.Sda.provision_wired_device,
+ wireless.Wireless.provision
+
+ - Paths used are
+ get /dna/intent/api/v1/site
+ get /dna/intent/api/v1/network-device/ip-address/{ipAddress}
+ get /dna/intent/api/v1/task/{taskId}
+ get /dna/intent/api/v1/business/sda/provision-device
+ put /dna/intent/api/v1/business/sda/provision-device
+ post /dna/intent/api/v1/business/sda/provision-device
+ post /dna/intent/api/v1/wireless/provision
+
+"""
+
+EXAMPLES = r"""
+- name: Create/Modify a new provision
+ cisco.dnac.provision_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ state: merged
+ config:
+ - site_name_hierarchy: string
+ management_ip_address: string
+ managed_ap_locations: list
+ dynamic_interfaces:
+ - vlan_id: integer
+ interface_name: string
+ interface_ip_address: string
+ interface_gateway: string
+ interface_netmask_in_c_i_d_r: integer
+ lag_or_port_number: integer
+
+"""
+
+RETURN = r"""
+# Case_1: Successful creation/update/deletion of provision
+response_1:
+ description: A dictionary with details of provision is returned
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response":
+ {
+ "response": String,
+ "version": String
+ },
+ "msg": String
+ }
+
+# Case_2: Error while creating a provision
+response_2:
+ description: A list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: list
+ sample: >
+ {
+ "response": [],
+ "msg": String
+ }
+
+# Case_3: Already exists and requires no update
+response_3:
+  description: A dictionary with the existing details as returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": String,
+ "msg": String
+ }
+"""
+import time
+import re
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts
+)
+
+
+class Provision(DnacBase):
+
+    """
+    Workflow manager for wired and wireless device provisioning in
+    Cisco Catalyst Center.
+
+    State handlers (get_diff_merged / get_diff_deleted and their
+    verify_* counterparts) are dispatched by state name from main();
+    connection handling, logging and result bookkeeping come from DnacBase.
+    """
+    def __init__(self, module):
+        # All module/connection setup is delegated to the DnacBase initializer.
+        super().__init__(module)
+
+ def validate_input(self, state=None):
+
+ """
+ Validate the fields provided in the playbook.
+ Checks the configuration provided in the playbook against a predefined specification
+ to ensure it adheres to the expected structure and data types.
+ Args:
+ self: The instance of the class containing the 'config' attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of the
+ 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call 'validate_input' on it.
+ If the validation succeeds, 'self.status' will be 'success' and
+ 'self.validated_config' will contain the validated configuration. If it fails,
+ 'self.status' will be 'failed', and 'self.msg' will describe the validation issues.
+ """
+
+ if not self.config:
+ self.msg = "config not available in playbook for validattion"
+ self.status = "success"
+ return self
+
+ provision_spec = {
+ "management_ip_address": {'type': 'str', 'required': True},
+ "site_name_hierarchy": {'type': 'str', 'required': False},
+ "managed_ap_locations": {'type': 'list', 'required': False,
+ 'elements': 'str'},
+ "dynamic_interfaces": {'type': 'list', 'required': False,
+ 'elements': 'dict'}
+ }
+ if state == "merged":
+ provision_spec["site_name_hierarchy"] = {'type': 'str', 'required': True}
+
+ # Validate provision params
+ valid_provision, invalid_params = validate_list_of_dicts(
+ self.config, provision_spec
+ )
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format(
+ "\n".join(invalid_params))
+ self.log(str(self.msg), "ERROR")
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_provision
+ self.msg = "Successfully validated playbook configuration parameters using 'validate_input': {0}".format(str(valid_provision))
+ self.log(str(self.msg), "INFO")
+ self.status = "success"
+ return self
+
+ def get_dev_type(self):
+ """
+ Fetches the type of device (wired/wireless)
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - device_type: A string indicating the type of the
+ device (wired/wireless).
+ Example:
+ Post creation of the validated input, we this method gets the
+ type of the device.
+ """
+
+ dev_response = self.dnac_apply['exec'](
+ family="devices",
+ function='get_network_device_by_ip',
+ params={"ip_address": self.validated_config[0]["management_ip_address"]}
+ )
+
+ self.log("The device response from 'get_network_device_by_ip' API is {0}".format(str(dev_response)), "DEBUG")
+ dev_dict = dev_response.get("response")
+ device_family = dev_dict["family"]
+
+ if device_family == "Wireless Controller":
+ device_type = "wireless"
+ elif device_family in ["Switches and Hubs", "Routers"]:
+ device_type = "wired"
+ else:
+ device_type = None
+ self.log("The device type is {0}".format(device_type), "INFO")
+ return device_type
+
+    def get_task_status(self, task_id=None):
+        """
+        Poll the status of an asynchronous provisioning task until it finishes.
+
+        Args:
+            task_id (str): Id of the task returned by a provision API call.
+        Returns:
+            bool: True once the task progress leaves 'In Progress'. On a task
+            error the module is failed via fail_json instead of returning.
+        """
+        result = False
+        params = {"task_id": task_id}
+        while True:
+            response = self.dnac_apply['exec'](
+                family="task",
+                function='get_task_by_id',
+                params=params,
+            )
+            self.log("Response collected from 'get_task_by_id' API is {0}".format(str(response)), "DEBUG")
+            response = response.response
+            self.log("Task status for the task id {0} is {1}".format(str(task_id), str(response)), "INFO")
+            # NOTE(review): re.search assumes 'progress' is always a string;
+            # a missing key would raise TypeError here - confirm API contract.
+            if response.get('isError') or re.search(
+                'failed', response.get('progress'), flags=re.IGNORECASE
+            ):
+                msg = 'Provision task with id {0} has not completed - Reason: {1}'.format(
+                    task_id, response.get("failureReason"))
+                self.module.fail_json(msg=msg)
+                return False  # unreachable in practice: fail_json exits the module
+
+            # Any progress value other than 'In Progress' is treated as done.
+            if response.get('progress') != 'In Progress':
+                result = True
+                break
+
+            time.sleep(3)  # fixed poll interval between task-status queries
+        self.result.update(dict(provision_task=response))
+        return result
+
+ def get_site_type(self, site_name_hierarchy=None):
+ """
+ Fetches the type of site
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - site_type: A string indicating the type of the
+ site (area/building/floor).
+ Example:
+ Post creation of the validated input, we this method gets the
+ type of the site.
+ """
+
+ try:
+ response = self.dnac_apply['exec'](
+ family="sites",
+ function='get_site',
+ params={"name": site_name_hierarchy},
+ )
+ except Exception:
+ self.log("Exception occurred as \
+ site '{0}' was not found".format(self.want.get("site_name")), "CRITICAL")
+ self.module.fail_json(msg="Site not found", response=[])
+
+ if response:
+ self.log("Received site details\
+ for '{0}': {1}".format(site_name_hierarchy, str(response)), "DEBUG")
+ site = response.get("response")
+ site_additional_info = site[0].get("additionalInfo")
+ for item in site_additional_info:
+ if item["nameSpace"] == "Location":
+ site_type = item.get("attributes").get("type")
+ self.log("Site type for site name '{1}' : {0}".format(site_type, site_name_hierarchy), "INFO")
+
+ return site_type
+
+ def get_wired_params(self):
+ """
+ Prepares the payload for provisioning of the wired devices
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - wired_params: A dictionary containing all the values indicating
+ management IP address of the device and the hierarchy
+ of the site.
+ Example:
+ Post creation of the validated input, it fetches the required
+ paramters and stores it for further processing and calling the
+ parameters in other APIs.
+ """
+
+ wired_params = {
+ "deviceManagementIpAddress": self.validated_config[0]["management_ip_address"],
+ "siteNameHierarchy": self.validated_config[0].get("site_name_hierarchy")
+ }
+
+ self.log("Parameters collected for the provisioning of wired device:{0}".format(wired_params), "INFO")
+ return wired_params
+
+ def get_wireless_params(self):
+ """
+ Prepares the payload for provisioning of the wireless devices
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - wireless_params: A list of dictionary containing all the values indicating
+ management IP address of the device, hierarchy
+ of the site, AP Location of the wireless controller and details
+ of the interface
+ Example:
+ Post creation of the validated input, it fetches the required
+ paramters and stores it for further processing and calling the
+ parameters in other APIs.
+ """
+
+ wireless_params = [
+ {
+ "site": self.validated_config[0].get("site_name_hierarchy"),
+ "managedAPLocations": self.validated_config[0].get("managed_ap_locations"),
+ }
+ ]
+ for ap_loc in wireless_params[0]["managedAPLocations"]:
+ if self.get_site_type(site_name_hierarchy=ap_loc) != "floor":
+ self.log("Managed AP Location must be a floor", "CRITICAL")
+ self.module.fail_json(msg="Managed AP Location must be a floor", response=[])
+
+ wireless_params[0]["dynamicInterfaces"] = []
+ for interface in self.validated_config[0].get("dynamic_interfaces"):
+ interface_dict = {
+ "interfaceIPAddress": interface.get("interface_ip_address"),
+ "interfaceNetmaskInCIDR": interface.get("interface_netmask_in_c_i_d_r"),
+ "interfaceGateway": interface.get("interface_gateway"),
+ "lagOrPortNumber": interface.get("lag_or_port_number"),
+ "vlanId": interface.get("vlan_id"),
+ "interfaceName": interface.get("interface_name")
+ }
+ wireless_params[0]["dynamicInterfaces"].append(interface_dict)
+ response = self.dnac_apply['exec'](
+ family="devices",
+ function='get_network_device_by_ip',
+ params={"management_ip_address": self.validated_config[0]["management_ip_address"]}
+ )
+
+ self.log("Response collected from 'get_network_device_by_ip' is:{0}".format(str(response)), "DEBUG")
+ wireless_params[0]["deviceName"] = response.get("response")[0].get("hostname")
+ self.log("Parameters collected for the provisioning of wireless device:{0}".format(wireless_params), "INFO")
+ return wireless_params
+
+ def get_want(self):
+ """
+ Get all provision related informantion from the playbook
+ Args:
+ self: The instance of the class containing the 'config' attribute to be validated.
+ config: validated config passed from the playbook
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.want: A dictionary of paramters obtained from the playbook
+ - self.msg: A message indicating all the paramters from the playbook are
+ collected
+ - self.status: Success
+ Example:
+ It stores all the paramters passed from the playbook for further processing
+ before calling the APIs
+ """
+
+ self.want = {}
+ self.want["device_type"] = self.get_dev_type()
+ if self.want["device_type"] == "wired":
+ self.want["prov_params"] = self.get_wired_params()
+ elif self.want["device_type"] == "wireless":
+ self.want["prov_params"] = self.get_wireless_params()
+ else:
+ self.log("Passed devices are neither wired or wireless devices", "WARNING")
+
+ self.msg = "Successfully collected all parameters from playbook " + \
+ "for comparison"
+ self.log(self.msg, "INFO")
+ self.status = "success"
+ return self
+
+    def get_diff_merged(self):
+        """
+        Add the device to the provision database (merged state).
+
+        Returns:
+            self: Instance with self.result updated (changed flag, message,
+            diff and task id) once the provisioning task completes.
+        Description:
+            Wired devices are re-provisioned when already provisioned and
+            freshly provisioned otherwise; wireless controllers go through
+            the wireless 'provision' API. Any other device type returns
+            early with an explanatory message.
+        """
+
+        device_type = self.want.get("device_type")
+        if device_type == "wired":
+            try:
+                status_response = self.dnac_apply['exec'](
+                    family="sda",
+                    function="get_provisioned_wired_device",
+                    op_modifies=True,
+                    params={
+                        "device_management_ip_address": self.validated_config[0]["management_ip_address"]
+                    },
+                )
+            except Exception:
+                # The SDK raises for a device that has never been provisioned;
+                # an empty status routes us to the initial-provision branch.
+                status_response = {}
+            self.log("Wired device's status Response collected from 'get_provisioned_wired_device' API is:{0}".format(str(status_response)), "DEBUG")
+            status = status_response.get("status")
+            self.log("The provisioned status of the wired device is {0}".format(status), "INFO")
+
+            if status == "success":
+                # Already provisioned: re-provision to apply the new intent.
+                response = self.dnac_apply['exec'](
+                    family="sda",
+                    function="re_provision_wired_device",
+                    op_modifies=True,
+                    params=self.want["prov_params"],
+                )
+                self.log("Reprovisioning response collected from 're_provision_wired_device' API is: {0}".format(response), "DEBUG")
+            else:
+                response = self.dnac_apply['exec'](
+                    family="sda",
+                    function="provision_wired_device",
+                    op_modifies=True,
+                    params=self.want["prov_params"],
+                )
+                self.log("Provisioning response collected from 'provision_wired_device' API is: {0}".format(response), "DEBUG")
+
+        elif device_type == "wireless":
+            response = self.dnac_apply['exec'](
+                family="wireless",
+                function="provision",
+                op_modifies=True,
+                params=self.want["prov_params"],
+            )
+            self.log("Wireless provisioning response collected from 'provision' API is: {0}".format(response), "DEBUG")
+
+        else:
+            self.result['msg'] = "Passed device is neither wired nor wireless"
+            self.log(self.result['msg'], "ERROR")
+            self.result['response'] = self.want["prov_params"]
+            return self
+
+        # Block until the asynchronous task finishes (fails the module on error).
+        task_id = response.get("taskId")
+        provision_info = self.get_task_status(task_id=task_id)
+        self.result["changed"] = True
+        self.result['msg'] = "Provision done Successfully"
+        self.result['diff'] = self.validated_config
+        self.result['response'] = task_id
+        self.log(self.result['msg'], "INFO")
+        return self
+
+    def get_diff_deleted(self):
+        """
+        Remove the device from the provision database (deleted state).
+
+        Returns:
+            self: Instance with self.result updated once the deletion task
+            completes, or with an explanatory message when deletion is not
+            applicable.
+        Description:
+            Only wired devices can be un-provisioned through this API, and
+            only when they are currently in the provisioned state.
+        """
+
+        device_type = self.want.get("device_type")
+
+        if device_type != "wired":
+            # Un-provisioning is only supported for wired devices.
+            self.result['msg'] = "APIs are not supported for the device"
+            self.log(self.result['msg'], "CRITICAL")
+            return self
+
+        try:
+            status_response = self.dnac_apply['exec'](
+                family="sda",
+                function="get_provisioned_wired_device",
+                op_modifies=True,
+                params={
+                    "device_management_ip_address": self.validated_config[0]["management_ip_address"]
+                },
+            )
+
+        except Exception:
+            # SDK raises when the device is not provisioned; the empty status
+            # falls into the "not provisioned" early return below.
+            status_response = {}
+        self.log("Wired device's status Response collected from 'get_provisioned_wired_device' API is:{0}".format(str(status_response)), "DEBUG")
+        status = status_response.get("status")
+        self.log("The provisioned status of the wired device is {0}".format(status), "INFO")
+
+        if status != "success":
+            self.result['msg'] = "Device associated with the passed IP address is not provisioned"
+            self.log(self.result['msg'], "CRITICAL")
+            self.result['response'] = self.want["prov_params"]
+            return self
+
+        response = self.dnac_apply['exec'](
+            family="sda",
+            function="delete_provisioned_wired_device",
+            op_modifies=True,
+            params={
+                "device_management_ip_address": self.validated_config[0]["management_ip_address"]
+            },
+        )
+        self.log("Response collected from the 'delete_provisioned_wired_device' API is : {0}".format(str(response)), "DEBUG")
+
+        # Block until the asynchronous deletion task finishes.
+        task_id = response.get("taskId")
+        deletion_info = self.get_task_status(task_id=task_id)
+        self.result["changed"] = True
+        self.result['msg'] = "Deletion done Successfully"
+        self.result['diff'] = self.validated_config
+        self.result['response'] = task_id
+        self.log(self.result['msg'], "INFO")
+        return self
+
+ def verify_diff_merged(self):
+ """
+ Verify the merged status(Creation/Updation) of Discovery in Cisco Catalyst Center.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the merged status of a configuration in Cisco Catalyst Center by
+ retrieving the current state (have) and desired state (want) of the configuration,
+ logs the states, and validates whether the specified device(s) exists in the DNA
+ Center configuration's Inventory Database in the provisioned state.
+ """
+
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+ # Code to validate Cisco Catalyst Center config for merged state
+
+ device_type = self.want.get("device_type")
+ if device_type == "wired":
+ try:
+ status_response = self.dnac_apply['exec'](
+ family="sda",
+ function="get_provisioned_wired_device",
+ op_modifies=True,
+ params={
+ "device_management_ip_address": self.validated_config[0]["management_ip_address"]
+ },
+ )
+ except Exception:
+ status_response = {}
+ self.log("Wired device's status Response collected from 'get_provisioned_wired_device' API is:{0}".format(str(status_response)), "DEBUG")
+ status = status_response.get("status")
+ self.log("The provisioned status of the wired device is {0}".format(status), "INFO")
+
+ if status == "success":
+ self.log("Requested wired device is alread provisioned", "INFO")
+
+ else:
+ self.log("Requested wired device is not provisioned", "INFO")
+
+ else:
+ self.log("Currently we don't have any API in the Cisco Catalyst Center to fetch the provisioning details of wired devices")
+ self.status = "success"
+
+ return self
+
+ def verify_diff_deleted(self):
+ """
+ Verify the deletion status of Discovery in Cisco Catalyst Center.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the deletion status of a configuration in Cisco Catalyst Center.
+ It validates whether the specified discovery(s) exists in the Cisco Catalyst Center configuration's
+ Inventory Database in the provisioned state.
+ """
+
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+ # Code to validate Cisco Catalyst Center config for merged state
+
+ device_type = self.want.get("device_type")
+ if device_type == "wired":
+ try:
+ status_response = self.dnac_apply['exec'](
+ family="sda",
+ function="get_provisioned_wired_device",
+ op_modifies=True,
+ params={
+ "device_management_ip_address": self.validated_config[0]["management_ip_address"]
+ },
+ )
+ except Exception:
+ status_response = {}
+ self.log("Wired device's status Response collected from 'get_provisioned_wired_device' API is:{0}".format(str(status_response)), "DEBUG")
+ status = status_response.get("status")
+ self.log("The provisioned status of the wired device is {0}".format(status), "INFO")
+
+ if status == "success":
+ self.log("Requested wired device is in provisioned state and is not unprovisioned", "INFO")
+
+ else:
+ self.log("Requested wired device is unprovisioned", "INFO")
+
+ else:
+ self.log("Currently we don't have any API in the Cisco Catalyst Center to fetch the provisioning details of wired devices")
+ self.status = "success"
+
+ return self
+
+
+def main():
+
+ """
+ main entry point for module execution
+ """
+
+ element_spec = {'dnac_host': {'required': True, 'type': 'str'},
+ 'dnac_port': {'type': 'str', 'default': '443'},
+ 'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
+ 'dnac_password': {'type': 'str', 'no_log': True},
+ 'dnac_verify': {'type': 'bool', 'default': 'True'},
+ 'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
+ 'dnac_debug': {'type': 'bool', 'default': False},
+ 'dnac_log': {'type': 'bool', 'default': False},
+ "dnac_log_level": {"type": 'str', "default": 'WARNING'},
+ "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
+ "dnac_log_append": {"type": 'bool', "default": True},
+ "config_verify": {"type": 'bool', "default": False},
+ 'dnac_api_task_timeout': {'type': 'int', "default": 1200},
+ 'dnac_task_poll_interval': {'type': 'int', "default": 2},
+ 'validate_response_schema': {'type': 'bool', 'default': True},
+ 'config': {'required': True, 'type': 'list', 'elements': 'dict'},
+ 'state': {'default': 'merged', 'choices': ['merged', 'deleted']}
+ }
+ module = AnsibleModule(argument_spec=element_spec,
+ supports_check_mode=False)
+ ccc_provision = Provision(module)
+ config_verify = ccc_provision.params.get("config_verify")
+
+ state = ccc_provision.params.get("state")
+ if state not in ccc_provision.supported_states:
+ ccc_provision.status = "invalid"
+ ccc_provision.msg = "State {0} is invalid".format(state)
+ ccc_provision.check_return_status()
+
+ ccc_provision.validate_input(state=state).check_return_status()
+
+ for config in ccc_provision.validated_config:
+ ccc_provision.reset_values()
+ ccc_provision.get_want().check_return_status()
+ ccc_provision.get_diff_state_apply[state]().check_return_status()
+ if config_verify:
+ ccc_provision.verify_diff_state_apply[state]().check_return_status()
+
+ module.exit_json(**ccc_provision.result)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/cisco/dnac/plugins/modules/role_permissions_info.py b/ansible_collections/cisco/dnac/plugins/modules/role_permissions_info.py
index 53d7a9b3a..3f520a697 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/role_permissions_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/role_permissions_info.py
@@ -28,7 +28,7 @@ seealso:
link: https://developer.cisco.com/docs/dna-center/#!get-permissions-api
notes:
- SDK Method used are
- userand_roles.UserandRoles.get_permissions_ap_i,
+ user_and_roles.UserandRoles.get_permissions_ap_i,
- Paths used are
get /dna/system/api/v1/role/permissions,
diff --git a/ansible_collections/cisco/dnac/plugins/modules/roles_info.py b/ansible_collections/cisco/dnac/plugins/modules/roles_info.py
index 279f9ed10..3e719be1a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/roles_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/roles_info.py
@@ -28,7 +28,7 @@ seealso:
link: https://developer.cisco.com/docs/dna-center/#!get-roles-api
notes:
- SDK Method used are
- userand_roles.UserandRoles.get_roles_ap_i,
+ user_and_roles.UserandRoles.get_roles_ap_i,
- Paths used are
get /dna/system/api/v1/roles,
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_intent.py b/ansible_collections/cisco/dnac/plugins/modules/site_intent.py
index 1ce744693..751d520be 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/site_intent.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_intent.py
@@ -7,7 +7,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-__author__ = ("Madhan Sankaranarayanan, Rishita Chowdhary")
+__author__ = ("Madhan Sankaranarayanan, Rishita Chowdhary, Abhishek Maheshwari")
DOCUMENTATION = r"""
---
@@ -23,77 +23,99 @@ extends_documentation_fragment:
- cisco.dnac.intent_params
author: Madhan Sankaranarayanan (@madhansansel)
Rishita Chowdhary (@rishitachowdhary)
+ Abhishek Maheshwari (@abhishekmaheshwari)
options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after applying the playbook config.
+ type: bool
+ default: False
state:
- description: The state of DNAC after module completion.
+ description: The state of Catalyst Center after module completion.
type: str
choices: [ merged, deleted ]
default: merged
config:
- description:
- - List of details of site being managed.
+ description: It represents a list of details for creating/managing/deleting sites, including areas, buildings, and floors.
type: list
elements: dict
- required: true
+ required: True
suboptions:
- type:
+ site_type:
description: Type of site to create/update/delete (eg area, building, floor).
type: str
site:
- description: Site Details.
+ description: Contains details about the site being managed including areas, buildings and floors.
type: dict
suboptions:
area:
- description: Site Create's area.
+ description: Configuration details for creating or managing an area within a site.
type: dict
suboptions:
name:
- description: Name of the area (eg Area1).
+ description: Name of the area to be created or managed (e.g., "Area1").
type: str
- parentName:
- description: Parent name of the area to be created.
+ parent_name:
+ description: The full name of the parent under which the area will be created/managed/deleted (e.g., "Global/USA").
type: str
building:
- description: Building Details.
+ description: Configuration details required for creating or managing a building within a site.
type: dict
suboptions:
address:
- description: Address of the building to be created.
+ description: Physical address of the building that is to be created or managed.
type: str
latitude:
- description: Latitude coordinate of the building (eg 37.338).
- type: int
+ description: Geographical latitude coordinate of the building. For example, use 37.338 for a location in San Jose, California.
+ Valid values range from -90.0 to +90.0 degrees.
+ type: float
longitude:
- description: Longitude coordinate of the building (eg -121.832).
- type: int
+ description: Geographical longitude coordinate of the building. For example, use -121.832 for a location in San Jose, California.
+ Valid values range from -180.0 to +180.0 degrees.
+ type: float
name:
- description: Name of the building (eg building1).
+ description: Name of the building (e.g., "Building1").
type: str
- parentName:
- description: Parent name of building to be created.
+ parent_name:
+ description: Hierarchical parent path of the building, indicating its location within the site (e.g., "Global/USA/San Francisco").
type: str
floor:
- description: Site Create's floor.
+ description: Configuration details required for creating or managing a floor within a site.
type: dict
suboptions:
height:
- description: Height of the floor (eg 15).
- type: int
+ description: Height of the floor in feet (e.g., 15.23).
+ type: float
length:
- description: Length of the floor (eg 100).
- type: int
+ description: Length of the floor in feet (e.g., 100.11).
+ type: float
name:
- description: Name of the floor (eg floor-1).
+ description: Name of the floor (e.g., "Floor-1").
type: str
- parentName:
- description: Parent name of the floor to be created.
+ parent_name:
+ description: Hierarchical parent path of the floor, indicating its location within the site (e.g.,
+ "Global/USA/San Francisco/BGL_18").
type: str
- rfModel:
- description: Type of floor. Allowed values are 'Cubes And Walled Offices',
- 'Drywall Office Only', 'Indoor High Ceiling', 'Outdoor Open Space'.
+ rf_model:
+ description: The RF (Radio Frequency) model type for the floor, which is essential for simulating and optimizing wireless
+ network coverage. Select from the following allowed values, which describe different environmental signal propagation
+ characteristics.
+ Type of floor (allowed values are 'Cubes And Walled Offices', 'Drywall Office Only', 'Indoor High Ceiling',
+ 'Outdoor Open Space').
+ Cubes And Walled Offices - This RF model typically represents indoor areas with cubicles or walled offices, where
+ radio signals may experience attenuation due to walls and obstacles.
+ Drywall Office Only - This RF model indicates an environment with drywall partitions, commonly found in office spaces,
+ which may have moderate signal attenuation.
+ Indoor High Ceiling - This RF model is suitable for indoor spaces with high ceilings, such as auditoriums or atriums,
+ where signal propagation may differ due to the height of the ceiling.
+ Outdoor Open Space - This RF model is used for outdoor areas with open spaces, where signal propagation is less obstructed
+ and may follow different patterns compared to indoor environments.
type: str
width:
- description: Width of the floor (eg 100).
+ description: Width of the floor in feet (e.g., 100.22).
+ type: float
+ floor_number:
+ description: Floor number within the building site (e.g., 5). This value can only be specified during the creation of the
+ floor and cannot be modified afterward.
type: int
requirements:
@@ -112,6 +134,25 @@ notes:
"""
EXAMPLES = r"""
+- name: Create a new area site
+ cisco.dnac.site_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: "{{dnac_log}}"
+ state: merged
+ config:
+ - site:
+ area:
+ name: Test
+ parent_name: Global/India
+ site_type: area
+
- name: Create a new building site
cisco.dnac.site_intent:
dnac_host: "{{dnac_host}}"
@@ -121,22 +162,89 @@ EXAMPLES = r"""
dnac_port: "{{dnac_port}}"
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
dnac_log: "{{dnac_log}}"
+ state: merged
config:
- site:
- building:
- address: string
- latitude: 0
- longitude: 0
- name: string
- parentName: string
- type: string
+ - site:
+ building:
+ name: Building_1
+ parent_name: Global/India
+ address: Bengaluru, Karnataka, India
+ latitude: 24.12
+ longitude: 23.45
+ site_type: building
+
+- name: Create a Floor site under the building
+ cisco.dnac.site_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: "{{dnac_log}}"
+ state: merged
+ config:
+ - site:
+ floor:
+ name: Floor_1
+ parent_name: Global/India/Building_1
+ length: 75.76
+ width: 35.54
+ height: 30.12
+ rf_model: Cubes And Walled Offices
+ floor_number: 2
+ site_type: floor
+
+- name: Updating the Floor details under the building
+ cisco.dnac.site_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: "{{dnac_log}}"
+ state: merged
+ config:
+ - site:
+ floor:
+ name: Floor_1
+ parent_name: Global/India/Building_1
+ length: 75.76
+ width: 35.54
+ height: 30.12
+ site_type: floor
+
+- name: Delete a site (deletion requires the site name and parent name)
+ cisco.dnac.site_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: "{{dnac_log}}"
+ state: deleted
+ config:
+ - site:
+ floor:
+ name: Floor_1
+ parent_name: Global/India/Building_1
+ site_type: floor
"""
RETURN = r"""
#Case_1: Site is successfully created/updated/deleted
response_1:
- description: A dictionary with API execution details as returned by the Cisco DNAC Python SDK
+ description: A dictionary with API execution details as returned by the Cisco Catalyst Center Python SDK
returned: always
type: dict
sample: >
@@ -177,7 +285,7 @@ response_2:
#Case_3: Error while creating/updating/deleting site
response_3:
- description: A dictionary with API execution details as returned by the Cisco DNAC Python SDK
+ description: A dictionary with API execution details as returned by the Cisco Catalyst Center Python SDK
returned: always
type: dict
sample: >
@@ -202,7 +310,7 @@ response_3:
#Case_4: Site not found when attempting to delete site
response_4:
- description: A list with the response returned by the Cisco DNAC Python
+ description: A list with the response returned by the Cisco Catalyst Center Python
returned: always
type: list
sample: >
@@ -214,81 +322,95 @@ response_4:
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
- DNACSDK,
+ DnacBase,
validate_list_of_dicts,
- log,
get_dict_result,
- dnac_compare_equality,
)
-import copy
# Maps Cisco Catalyst Center floor-plan IDs to their human-readable RF model names.
floor_plan = {
    '101101': 'Cubes And Walled Offices',
    '101102': 'Drywall Office Only',
    '101103': 'Outdoor Open Space',
    '101104': 'Indoor High Ceiling',
    '101105': 'Free Space',
}
-class DnacSite:
+class DnacSite(DnacBase):
+ """Class containing member attributes for site intent module"""
    def __init__(self, module):
        """Initialize the site intent workflow from the Ansible module.

        All connection/result bookkeeping is delegated to the DnacBase
        parent class; this subclass only declares which Ansible states
        it supports.
        """
        super().__init__(module)
        # Only these playbook states are handled by this module.
        self.supported_states = ["merged", "deleted"]
    def validate_input(self):
        """
        Validate the fields provided in the playbook.
        Checks the configuration provided in the playbook against a predefined specification
        to ensure it adheres to the expected structure and data types.
        Parameters:
            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        Returns:
            The method returns an instance of the class with updated attributes:
                - self.msg: A message describing the validation result.
                - self.status: The status of the validation (either 'success' or 'failed').
                - self.validated_config: If successful, a validated version of the 'config' parameter.
        Example:
            To use this method, create an instance of the class and call 'validate_input' on it.
            If the validation succeeds, 'self.status' will be 'success' and 'self.validated_config'
            will contain the validated configuration. If it fails, 'self.status' will be 'failed', and
            'self.msg' will describe the validation issues.
        """

        if not self.config:
            # NOTE(review): status is set to 'success' while the message is
            # logged at ERROR level — confirm a missing config is genuinely
            # considered a non-failure here.
            self.status = "success"
            self.msg = "Configuration is not available in the playbook for validation"
            self.log(self.msg, "ERROR")
            return self

        # Expected top-level shape of each config entry: an optional 'type'
        # string and a mandatory 'site' dictionary.
        temp_spec = dict(
            type=dict(required=False, type='str'),
            site=dict(required=True, type='dict'),
        )
        # Normalize playbook keys (camelCase -> snake_case) and the site-type
        # key before running the structural validation.
        self.config = self.camel_to_snake_case(self.config)
        self.config = self.update_site_type_key(self.config)

        # Validate site params
        valid_temp, invalid_params = validate_list_of_dicts(
            self.config, temp_spec
        )

        if invalid_params:
            self.msg = "Invalid parameters in playbook: {0}".format(
                "\n".join(invalid_params)
            )
            self.log(self.msg, "ERROR")
            self.status = "failed"
            return self

        self.validated_config = valid_temp
        self.msg = "Successfully validated playbook config params: {0}".format(str(valid_temp))
        self.log(self.msg, "INFO")
        self.status = "success"

        return self
def get_current_site(self, site):
+ """
+ Get the current site information.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - site (list): A list containing information about the site.
+ Returns:
+ - dict: A dictionary containing the extracted site information.
+ Description:
+ This method extracts information about the current site based on
+ the provided 'site' list. It determines the type of the site
+ (area, building, or floor) and retrieves specific details
+ accordingly. The resulting dictionary includes the type, site
+ details, and the site ID.
+ """
+
site_info = {}
location = get_dict_result(site[0].get("additionalInfo"), 'nameSpace', "Location")
@@ -307,9 +429,10 @@ class DnacSite:
building=dict(
name=site[0].get("name"),
parentName=site[0].get("siteNameHierarchy").split("/" + site[0].get("name"))[0],
- address=location.get("attributes").get("address", ""),
+ address=location.get("attributes").get("address"),
latitude=location.get("attributes").get("latitude"),
longitude=location.get("attributes").get("longitude"),
+ country=location.get("attributes").get("country"),
)
)
@@ -322,10 +445,11 @@ class DnacSite:
floor=dict(
name=site[0].get("name"),
parentName=site[0].get("siteNameHierarchy").split("/" + site[0].get("name"))[0],
- rfModel=floor_plan.get(rf_model),
+ rf_model=floor_plan.get(rf_model),
width=map_geometry.get("attributes").get("width"),
length=map_geometry.get("attributes").get("length"),
- height=map_geometry.get("attributes").get("height")
+ height=map_geometry.get("attributes").get("height"),
+ floorNumber=map_geometry.get("attributes").get("floor_number", "")
)
)
@@ -335,12 +459,29 @@ class DnacSite:
siteId=site[0].get("id")
)
- if self.log:
- log(str(current_site))
+ self.log("Current site details: {0}".format(str(current_site)), "INFO")
return current_site
def site_exists(self):
+ """
+ Check if the site exists in Cisco Catalyst Center.
+
+ Parameters:
+ - self (object): An instance of the class containing the method.
+ Returns:
+ - tuple: A tuple containing a boolean indicating whether the site exists and
+ a dictionary containing information about the existing site.
+ The returned tuple includes two elements:
+ - site_exists (bool): Indicates whether the site exists.
+ - dict: Contains information about the existing site. If the
+ site doesn't exist, this dictionary is empty.
+ Description:
+ Checks the existence of a site in Cisco Catalyst Center by querying the
+ 'get_site' function in the 'sites' family. It utilizes the
+ 'site_name' parameter from the 'want' attribute to identify the site.
+ """
+
site_exists = False
current_site = {}
response = None
@@ -352,79 +493,230 @@ class DnacSite:
)
except Exception as e:
- if self.log:
- log("The input site is not valid or site is not present.")
-
+ self.log("The provided site name '{0}' is either invalid or not present in the Cisco Catalyst Center."
+ .format(self.want.get("site_name")), "WARNING")
if response:
- if self.log:
- log(str(response))
-
response = response.get("response")
+ self.log("Received API response from 'get_site': {0}".format(str(response)), "DEBUG")
current_site = self.get_current_site(response)
site_exists = True
-
- if self.log:
- log(str(self.validated))
+ self.log("Site '{0}' exists in Cisco Catalyst Center".format(self.want.get("site_name")), "INFO")
return (site_exists, current_site)
    def get_site_params(self, params):
        """
        Store the site-related parameters.

        Parameters:
            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
            - params (dict): Dictionary containing site-related parameters.
        Returns:
            - dict: Dictionary containing the stored site-related parameters.
                    The returned dictionary includes the following keys:
                    - 'type' (str): The type of the site.
                    - 'site' (dict): Dictionary containing site-related info.
        Description:
            This method takes a dictionary 'params' containing site-related
            information and builds the API-facing payload based on the site
            type, translating playbook snake_case keys (e.g. 'parent_name',
            'rf_model') into the camelCase keys the API expects (e.g.
            'parentName', 'rfModel').
        """

        typeinfo = params.get("type")
        site_info = {}

        if typeinfo == 'area':
            area_details = params.get('site').get('area')
            site_info['area'] = {
                'name': area_details.get('name'),
                'parentName': area_details.get('parent_name')
            }
        elif typeinfo == 'building':
            building_details = params.get('site').get('building')
            site_info['building'] = {
                'name': building_details.get('name'),
                'address': building_details.get('address'),
                'parentName': building_details.get('parent_name'),
                'latitude': building_details.get('latitude'),
                'longitude': building_details.get('longitude'),
                'country': building_details.get('country')
            }
        else:
            # Any type other than 'area'/'building' is treated as a floor.
            floor_details = params.get('site').get('floor')
            site_info['floor'] = {
                'name': floor_details.get('name'),
                'parentName': floor_details.get('parent_name'),
                'length': floor_details.get('length'),
                'width': floor_details.get('width'),
                'height': floor_details.get('height'),
                'floorNumber': floor_details.get('floor_number', '')
            }
            try:
                site_info["floor"]["rfModel"] = floor_details.get("rf_model")
            except Exception as e:
                # NOTE(review): dict.get cannot raise for a missing key; this
                # only fires if floor_details is None — but that would already
                # have raised above. Confirm whether this guard is still needed.
                self.log("The attribute 'rf_model' is missing in floor '{0}'.".format(floor_details.get('name')), "WARNING")

        site_params = dict(
            type=typeinfo,
            site=site_info,
        )
        self.log("Site parameters: {0}".format(str(site_params)), "DEBUG")

        return site_params
def get_site_name(self, site):
+ """
+ Get and Return the site name.
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - site (dict): A dictionary containing information about the site.
+ Returns:
+ - str: The constructed site name.
+ Description:
+ This method takes a dictionary 'site' containing information about
+ the site and constructs the site name by combining the parent name
+ and site name.
+ """
+
site_type = site.get("type")
- parent_name = site.get("site").get(site_type).get("parentName")
+ parent_name = site.get("site").get(site_type).get("parent_name")
name = site.get("site").get(site_type).get("name")
site_name = '/'.join([parent_name, name])
-
- if self.log:
- log(site_name)
+ self.log("Site name: {0}".format(site_name), "INFO")
return site_name
- def site_requires_update(self):
- requested_site = self.want.get("site_params")
- current_site = self.have.get("current_site")
+ def compare_float_values(self, ele1, ele2, precision=2):
+ """
+ Compare two floating-point values with a specified precision.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - ele1 (float): The first floating-point value to be compared.
+ - ele2 (float): The second floating-point value to be compared.
+ - precision (int, optional): The number of decimal places to consider in the comparison, Defaults to 2.
+ Return:
+ bool: True if the rounded values are equal within the specified precision, False otherwise.
+ Description:
+ This method compares two floating-point values, ele1 and ele2, by rounding them
+ to the specified precision and checking if the rounded values are equal. It returns
+ True if the rounded values are equal within the specified precision, and False otherwise.
+ """
+
+ return round(float(ele1), precision) == round(float(ele2), precision)
+
+ def is_area_updated(self, updated_site, requested_site):
+ """
+ Check if the area site details have been updated.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - updated_site (dict): The site details after the update.
+ - requested_site (dict): The site details as requested for the update.
+ Return:
+ bool: True if the area details (name and parent name) have been updated, False otherwise.
+ Description:
+ This method compares the area details (name and parent name) of the updated site
+ with the requested site and returns True if they are equal, indicating that the area
+ details have been updated. Returns False if there is a mismatch in the area site details.
+ """
+
+ return (
+ updated_site['name'] == requested_site['name'] and
+ updated_site['parentName'] == requested_site['parentName']
+ )
- if self.log:
- log("Current Site: " + str(current_site))
- log("Requested Site: " + str(requested_site))
+ def is_building_updated(self, updated_site, requested_site):
+ """
+ Check if the building details in a site have been updated.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - updated_site (dict): The site details after the update.
+ - requested_site (dict): The site details as requested for the update.
+ Return:
+ bool: True if the building details have been updated, False otherwise.
+ Description:
+ This method compares the building details of the updated site with the requested site.
+ It checks if the name, parent_name, latitude, longitude, and address (if provided) are
+ equal, indicating that the building details have been updated. Returns True if the
+ details match, and False otherwise.
+ """
+
+ return (
+ updated_site['name'] == requested_site['name'] and
+ updated_site['parentName'] == requested_site['parentName'] and
+ self.compare_float_values(updated_site['latitude'], requested_site['latitude']) and
+ self.compare_float_values(updated_site['longitude'], requested_site['longitude']) and
+ ('address' in requested_site and (requested_site['address'] is None or updated_site.get('address') == requested_site['address']))
+ )
- obj_params = [
- ("type", "type"),
- ("site", "site")
- ]
+ def is_floor_updated(self, updated_site, requested_site):
+ """
+ Check if the floor details in a site have been updated.
- return any(not dnac_compare_equality(current_site.get(dnac_param),
- requested_site.get(ansible_param))
- for (dnac_param, ansible_param) in obj_params)
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - updated_site (dict): The site details after the update.
+ - requested_site (dict): The site details as requested for the update.
+ Return:
+ bool: True if the floor details have been updated, False otherwise.
+ Description:
+ This method compares the floor details of the updated site with the requested site.
+ It checks if the name, rf_model, length, width, and height are equal, indicating
+ that the floor details have been updated. Returns True if the details match, and False otherwise.
+ """
- def get_execution_details(self, execid):
- response = None
- response = self.dnac._exec(
- family="task",
- function='get_business_api_execution_details',
- params={"execution_id": execid}
- )
+ keys_to_compare = ['length', 'width', 'height']
+ if updated_site['name'] != requested_site['name'] or updated_site['rf_model'] != requested_site['rfModel']:
+ return False
- if self.log:
- log(str(response))
+ for key in keys_to_compare:
+ if not self.compare_float_values(updated_site[key], requested_site[key]):
+ return False
- if response and isinstance(response, dict):
- return response
+ return True
+
+ def site_requires_update(self):
+ """
+ Check if the site requires updates.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ bool: True if the site requires updates, False otherwise.
+ Description:
+ This method compares the site parameters of the current site
+ ('current_site') and the requested site parameters ('requested_site')
+ stored in the 'want' attribute. It checks for differences in
+ specified parameters, such as the site type and site details.
+ """
+
+ type = self.have['current_site']['type']
+ updated_site = self.have['current_site']['site'][type]
+ requested_site = self.want['site_params']['site'][type]
+ self.log("Current Site type: {0}".format(str(updated_site)), "INFO")
+ self.log("Requested Site type: {0}".format(str(requested_site)), "INFO")
+
+ if type == "building":
+ return not self.is_building_updated(updated_site, requested_site)
+
+ elif type == "floor":
+ return not self.is_floor_updated(updated_site, requested_site)
+
+ return not self.is_area_updated(updated_site, requested_site)
+
+ def get_have(self, config):
+ """
+ Get the site details from Cisco Catalyst Center
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): A dictionary containing the configuration details.
+ Returns:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method queries Cisco Catalyst Center to check if a specified site
+ exists. If the site exists, it retrieves details about the current
+ site, including the site ID and other relevant information. The
+ results are stored in the 'have' attribute for later reference.
+ """
- def get_have(self):
site_exists = False
current_site = None
have = {}
@@ -432,8 +724,7 @@ class DnacSite:
# check if given site exits, if exists store current site info
(site_exists, current_site) = self.site_exists()
- if self.log:
- log("Site Exists: " + str(site_exists) + "\n Current Site:" + str(current_site))
+ self.log("Current Site details (have): {0}".format(str(current_site)), "DEBUG")
if site_exists:
have["site_id"] = current_site.get("siteId")
@@ -441,19 +732,52 @@ class DnacSite:
have["current_site"] = current_site
self.have = have
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+
+ return self
+
+ def get_want(self, config):
+ """
+ Get all site-related information from the playbook needed for creation/updation/deletion of site in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): A dictionary containing configuration information.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ Retrieves all site-related information from playbook that is
+ required for creating a site in Cisco Catalyst Center. It includes
+ parameters such as 'site_params' and 'site_name.' The gathered
+ information is stored in the 'want' attribute for later reference.
+ """
- def get_want(self):
want = {}
-
- for site in self.validated:
- want = dict(
- site_params=self.get_site_params(site),
- site_name=self.get_site_name(site),
- )
-
+ want = dict(
+ site_params=self.get_site_params(config),
+ site_name=self.get_site_name(config),
+ )
self.want = want
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ return self
+
+ def get_diff_merged(self, config):
+ """
+ Update/Create site information in Cisco Catalyst Center with fields
+ provided in the playbook.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): A dictionary containing configuration information.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method determines whether to update or create a site in Cisco Catalyst Center based on the provided
+ configuration information. If the specified site exists, the method checks if it requires an update
+ by calling the 'site_requires_update' method. If an update is required, it calls the 'update_site'
+ function from the 'sites' family of the Cisco Catalyst Center API. If the site does not require an update,
+ the method exits, indicating that the site is up to date.
+ """
- def get_diff_merge(self):
site_updated = False
site_created = False
@@ -463,6 +787,7 @@ class DnacSite:
# Existing Site requires update
site_params = self.want.get("site_params")
site_params["site_id"] = self.have.get("site_id")
+
response = self.dnac._exec(
family="sites",
function='update_site',
@@ -474,19 +799,28 @@ class DnacSite:
else:
# Site does not need update
self.result['response'] = self.have.get("current_site")
- self.result['msg'] = "Site does not need update"
- self.module.exit_json(**self.result)
+ self.msg = "Site - {0} does not need any update".format(self.have.get("current_site"))
+ self.log(self.msg, "INFO")
+ self.result['msg'] = self.msg
+ return self
else:
# Creating New Site
+ site_params = self.want.get("site_params")
+ if site_params['site']['building']:
+ building_details = {}
+ for key, value in site_params['site']['building'].items():
+ if value is not None:
+ building_details[key] = value
+ site_params['site']['building'] = building_details
+
response = self.dnac._exec(
family="sites",
function='create_site',
op_modifies=True,
- params=self.want.get("site_params"),
+ params=site_params,
)
-
- log(str(response))
+ self.log("Received API response from 'create_site': {0}".format(str(response)), "DEBUG")
site_created = True
if site_created or site_updated:
@@ -505,8 +839,9 @@ class DnacSite:
break
if site_updated:
- log("Site Updated Successfully")
- self.result['msg'] = "Site Updated Successfully"
+ self.msg = "Site - {0} Updated Successfully".format(self.want.get("site_name"))
+ self.log(self.msg, "INFO")
+ self.result['msg'] = self.msg
self.result['response'].update({"siteId": self.have.get("site_id")})
else:
@@ -514,76 +849,237 @@ class DnacSite:
(site_exists, current_site) = self.site_exists()
if site_exists:
- log("Site Created Successfully")
- log("Current site:" + str(current_site))
- self.result['msg'] = "Site Created Successfully"
+ self.msg = "Site '{0}' created successfully".format(self.want.get("site_name"))
+ self.log(self.msg, "INFO")
+ self.log("Current site (have): {0}".format(str(current_site)), "DEBUG")
+ self.result['msg'] = self.msg
self.result['response'].update({"siteId": current_site.get('site_id')})
- def get_diff_delete(self):
- site_exists = self.have.get("site_exists")
+ return self
+
+ def delete_single_site(self, site_id, site_name):
+ """"
+ Delete a single site in the Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ site_id (str): The ID of the site to be deleted.
+ site_name (str): The name of the site to be deleted.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This function initiates the deletion of a site in the Cisco Catalyst Center by calling the delete API.
+ If the deletion is successful, the result is marked as changed, and the status is set to "success."
+ If an error occurs during the deletion process, the status is set to "failed," and the log contains
+ details about the error.
+ """
- if site_exists:
+ try:
response = self.dnac._exec(
family="sites",
function="delete_site",
- params={"site_id": self.have.get("site_id")},
+ params={"site_id": site_id},
)
if response and isinstance(response, dict):
+ self.log("Received API response from 'delete_site': {0}".format(str(response)), "DEBUG")
executionid = response.get("executionId")
+
while True:
execution_details = self.get_execution_details(executionid)
if execution_details.get("status") == "SUCCESS":
+ self.msg = "Site '{0}' deleted successfully".format(site_name)
self.result['changed'] = True
- self.result['response'] = execution_details
- self.result['response'].update({"siteId": self.have.get("site_id")})
- self.result['msg'] = "Site deleted successfully"
+ self.result['response'] = self.msg
+ self.status = "success"
+ self.log(self.msg, "INFO")
break
-
elif execution_details.get("bapiError"):
- self.module.fail_json(msg=execution_details.get("bapiError"),
- response=execution_details)
+ self.log("Error response for 'delete_site' execution: {0}".format(execution_details.get("bapiError")), "ERROR")
+ self.module.fail_json(msg=execution_details.get("bapiError"), response=execution_details)
break
- else:
- self.module.fail_json(msg="Site Not Found", response=[])
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Exception occurred while deleting site '{0}' due to: {1}".format(site_name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
    def get_diff_deleted(self, config):
        """
        Call Cisco Catalyst Center API to delete sites with provided inputs.

        Parameters:
            - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
            - config (dict): Dictionary containing information for site deletion.
        Returns:
            - self: The result dictionary includes the following keys:
                - 'changed' (bool): Indicates whether changes were made
                   during the deletion process.
                - 'response' (dict): Contains details about the execution
                   and the deleted site ID.
                - 'msg' (str): A message indicating the status of the deletion operation.
        Description:
            This method initiates the deletion of a site by calling the 'delete_site' function in the 'sites' family
            of the Cisco Catalyst Center API. It uses the site ID obtained from the 'have' attribute. Child sites,
            if any, are deleted first in reverse hierarchy order, followed by the parent site itself.
        """

        site_exists = self.have.get("site_exists")
        site_name = self.want.get("site_name")
        if not site_exists:
            # Nothing to delete; treat as an idempotent no-op success.
            self.status = "success"
            self.msg = "Unable to delete site '{0}' as it's not found in Cisco Catalyst Center".format(site_name)
            self.result.update({'changed': False,
                                'response': self.msg,
                                'msg': self.msg})
            self.log(self.msg, "INFO")

            return self

        # If the site has child sites, fetch them with the 'get_membership' API,
        # sort them in reverse hierarchy order, and delete from bottom to top.
        site_id = self.have.get("site_id")
        mem_response = self.dnac._exec(
            family="sites",
            function="get_membership",
            params={"site_id": site_id},
        )
        site_response = mem_response.get("site").get("response")
        self.log("Site {0} response along with it's child sites: {1}".format(site_name, str(site_response)), "DEBUG")

        if len(site_response) == 0:
            # Leaf site: a single delete is enough.
            self.delete_single_site(site_id, site_name)
            return self

        # Sorting the response in reverse order based on hierarchy levels
        sorted_site_resp = sorted(site_response, key=lambda x: x.get("groupHierarchy"), reverse=True)

        # Deleting each level in reverse order till topmost parent site
        for item in sorted_site_resp:
            self.delete_single_site(item['id'], item['name'])

        # Delete the final parent site
        self.delete_single_site(site_id, site_name)
        self.msg = "The site '{0}' and its child sites have been deleted successfully".format(site_name)
        self.result['response'] = self.msg
        self.log(self.msg, "INFO")

        return self
+
+ def verify_diff_merged(self, config):
+ """
+ Verify the merged status(Creation/Updation) of site configuration in Cisco Catalyst Center.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the merged status of a configuration in Cisco Catalyst Center by retrieving the current state
+ (have) and desired state (want) of the configuration, logs the states, and validates whether the specified
+ site exists in the Catalyst Center configuration.
+ """
+
+ self.get_have(config)
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ # Code to validate dnac config for merged state
+ site_exist = self.have.get("site_exists")
+ site_name = self.want.get("site_name")
+
+ if site_exist:
+ self.status = "success"
+ self.msg = "The requested site '{0}' is present in the Cisco Catalyst Center and its creation has been verified.".format(site_name)
+ self.log(self.msg, "INFO")
+
+ require_update = self.site_requires_update()
+
+ if not require_update:
+ self.log("The update for site '{0}' has been successfully verified.".format(site_name), "INFO")
+ self. status = "success"
+ return self
+
+ self.log("""The playbook input for site '{0}' does not align with the Cisco Catalyst Center, indicating that the merge task
+ may not have executed successfully.""".format(site_name), "INFO")
+
+ return self
+
    def verify_diff_deleted(self, config):
        """
        Verify the deletion status of site configuration in Cisco Catalyst Center.

        Args:
            - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
            - config (dict): The configuration details to be verified.
        Return:
            - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        Description:
            This method checks the deletion status of a configuration in Cisco Catalyst Center.
            It validates whether the specified site exists in the Catalyst Center configuration.
        """

        # Refresh the 'have' state so the check reflects post-deletion reality.
        self.get_have(config)
        self.log("Current State (have): {0}".format(str(self.have)), "INFO")
        self.log("Desired State (want): {0}".format(str(self.want)), "INFO")

        # Code to validate dnac config for delete state
        site_exist = self.have.get("site_exists")

        if not site_exist:
            self.status = "success"
            msg = """The requested site '{0}' has already been deleted from the Cisco Catalyst Center and this has been
                successfully verified.""".format(self.want.get("site_name"))
            self.log(msg, "INFO")
            return self

        # Site still present after a delete request: the deletion likely failed.
        self.log("""Mismatch between the playbook input for site '{0}' and the Cisco Catalyst Center indicates that
                the deletion was not executed successfully.""".format(self.want.get("site_name")), "INFO")

        return self
def main():
    """ main entry point for module execution

    Builds the Ansible argument spec, instantiates the DnacSite helper,
    validates the playbook input, and runs every config item through the
    merged/deleted state machine, optionally verifying the result.
    """
    element_spec = {'dnac_host': {'required': True, 'type': 'str'},
                    'dnac_port': {'type': 'str', 'default': '443'},
                    'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
                    'dnac_password': {'type': 'str', 'no_log': True},
                    # A bool option takes a real boolean default, not the string 'True'.
                    'dnac_verify': {'type': 'bool', 'default': True},
                    'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
                    'dnac_debug': {'type': 'bool', 'default': False},
                    'dnac_log_level': {'type': 'str', 'default': 'WARNING'},
                    'dnac_log_file_path': {'type': 'str', 'default': 'dnac.log'},
                    'dnac_log_append': {'type': 'bool', 'default': True},
                    'dnac_log': {'type': 'bool', 'default': False},
                    'validate_response_schema': {'type': 'bool', 'default': True},
                    'config_verify': {'type': 'bool', 'default': False},
                    'dnac_api_task_timeout': {'type': 'int', 'default': 1200},
                    'dnac_task_poll_interval': {'type': 'int', 'default': 2},
                    'config': {'required': True, 'type': 'list', 'elements': 'dict'},
                    'state': {'default': 'merged', 'choices': ['merged', 'deleted']}
                    }
    module = AnsibleModule(argument_spec=element_spec,
                           supports_check_mode=False)
    dnac_site = DnacSite(module)
    state = dnac_site.params.get("state")

    # Reject unsupported states up front instead of failing mid-run.
    if state not in dnac_site.supported_states:
        dnac_site.status = "invalid"
        dnac_site.msg = "State {0} is invalid".format(state)
        dnac_site.check_return_status()

    dnac_site.validate_input().check_return_status()
    config_verify = dnac_site.params.get("config_verify")

    for config in dnac_site.validated_config:
        dnac_site.reset_values()
        dnac_site.get_want(config).check_return_status()
        dnac_site.get_have(config).check_return_status()
        dnac_site.get_diff_state_apply[state](config).check_return_status()
        if config_verify:
            dnac_site.verify_diff_state_apply[state](config).check_return_status()
    module.exit_json(**dnac_site.result)
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/site_workflow_manager.py
new file mode 100644
index 000000000..1ae28afd8
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_workflow_manager.py
@@ -0,0 +1,1087 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2024, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ("Madhan Sankaranarayanan, Rishita Chowdhary, Abhishek Maheshwari")
+
+DOCUMENTATION = r"""
+---
+module: site_workflow_manager
+short_description: Resource module for Site operations
+description:
+- Manage operation create, update and delete of the resource Sites.
+- Creates site with area/building/floor with specified hierarchy.
+- Updates site with area/building/floor with specified hierarchy.
+- Deletes site with area/building/floor with specified hierarchy.
+version_added: '6.6.0'
+extends_documentation_fragment:
+ - cisco.dnac.workflow_manager_params
+author: Madhan Sankaranarayanan (@madhansansel)
+ Rishita Chowdhary (@rishitachowdhary)
+ Abhishek Maheshwari (@abhishekmaheshwari)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Catalyst Center after module completion.
+ type: str
+ choices: [ merged, deleted ]
+ default: merged
+ config:
+ description: It represents a list of details for creating/managing/deleting sites, including areas, buildings, and floors.
+ type: list
+ elements: dict
+ required: True
+ suboptions:
+ site_type:
+ description: Type of site to create/update/delete (eg area, building, floor).
+ type: str
+ site:
+ description: Contains details about the site being managed including areas, buildings and floors.
+ type: dict
+ suboptions:
+ area:
+ description: Configuration details for creating or managing an area within a site.
+ type: dict
+ suboptions:
+ name:
+ description: Name of the area to be created or managed (e.g., "Area1").
+ type: str
+ parent_name:
+ description: The full name of the parent under which the area will be created/managed/deleted (e.g., "Global/USA").
+ type: str
+ building:
+ description: Configuration details required for creating or managing a building within a site.
+ type: dict
+ suboptions:
+ address:
+ description: Physical address of the building that is to be created or managed.
+ type: str
+ latitude:
+ description: Geographical latitude coordinate of the building. For example, use 37.338 for a location in San Jose, California.
+ Valid values range from -90.0 to +90.0 degrees.
+ type: float
+ longitude:
+ description: Geographical longitude coordinate of the building. For example, use -121.832 for a location in San Jose, California.
+ Valid values range from -180.0 to +180.0 degrees.
+ type: float
+ name:
+ description: Name of the building (e.g., "Building1").
+ type: str
+ parent_name:
+ description: Hierarchical parent path of the building, indicating its location within the site (e.g., "Global/USA/San Francisco").
+ type: str
+ floor:
+ description: Configuration details required for creating or managing a floor within a site.
+ type: dict
+ suboptions:
+ height:
+ description: Height of the floor in feet (e.g., 15.23).
+ type: float
+ length:
+ description: Length of the floor in feet (e.g., 100.11).
+ type: float
+ name:
+ description: Name of the floor (e.g., "Floor-1").
+ type: str
+ parent_name:
+ description: Hierarchical parent path of the floor, indicating its location within the site (e.g.,
+ "Global/USA/San Francisco/BGL_18").
+ type: str
+ rf_model:
+ description: The RF (Radio Frequency) model type for the floor, which is essential for simulating and optimizing wireless
+ network coverage. Select from the following allowed values, which describe different environmental signal propagation
+ characteristics.
+ Type of floor (allowed values are 'Cubes And Walled Offices', 'Drywall Office Only', 'Indoor High Ceiling',
+ 'Outdoor Open Space').
+ Cubes And Walled Offices - This RF model typically represents indoor areas with cubicles or walled offices, where
+ radio signals may experience attenuation due to walls and obstacles.
+ Drywall Office Only - This RF model indicates an environment with drywall partitions, commonly found in office spaces,
+ which may have moderate signal attenuation.
+ Indoor High Ceiling - This RF model is suitable for indoor spaces with high ceilings, such as auditoriums or atriums,
+ where signal propagation may differ due to the height of the ceiling.
+ Outdoor Open Space - This RF model is used for outdoor areas with open spaces, where signal propagation is less obstructed
+ and may follow different patterns compared to indoor environments.
+ type: str
+ width:
+ description: Width of the floor in feet (e.g., 100.22).
+ type: float
+ floor_number:
+ description: Floor number within the building site (e.g., 5). This value can only be specified during the creation of the
+ floor and cannot be modified afterward.
+ type: int
+
+requirements:
+- dnacentersdk == 2.4.5
+- python >= 3.5
+notes:
+ - SDK Method used are
+ sites.Sites.create_site,
+ sites.Sites.update_site,
+ sites.Sites.delete_site
+
+ - Paths used are
+ post /dna/intent/api/v1/site,
+ put dna/intent/api/v1/site/{siteId},
+ delete dna/intent/api/v1/site/{siteId}
+"""
+
+EXAMPLES = r"""
+- name: Create a new area site
+ cisco.dnac.site_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: "{{dnac_log}}"
+ state: merged
+ config:
+ - site:
+ area:
+ name: Test
+ parent_name: Global/India
+ site_type: area
+
+- name: Create a new building site
+ cisco.dnac.site_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: "{{dnac_log}}"
+ state: merged
+ config:
+ - site:
+ building:
+ name: Building_1
+ parent_name: Global/India
+ address: Bengaluru, Karnataka, India
+ latitude: 24.12
+ longitude: 23.45
+ site_type: building
+
+- name: Create a Floor site under the building
+ cisco.dnac.site_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: "{{dnac_log}}"
+ state: merged
+ config:
+ - site:
+ floor:
+ name: Floor_1
+ parent_name: Global/India/Building_1
+ length: 75.76
+ width: 35.54
+ height: 30.12
+ rf_model: Cubes And Walled Offices
+ floor_number: 2
+ site_type: floor
+
+- name: Updating the Floor details under the building
+ cisco.dnac.site_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: "{{dnac_log}}"
+ state: merged
+ config:
+ - site:
+ floor:
+ name: Floor_1
+ parent_name: Global/India/Building_1
+ length: 75.76
+ width: 35.54
+ height: 30.12
+ site_type: floor
+
+- name: Deleting any site requires the site name and parent name
+ cisco.dnac.site_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: "{{dnac_log}}"
+ state: deleted
+ config:
+ - site:
+ floor:
+ name: Floor_1
+ parent_name: Global/India/Building_1
+ site_type: floor
+"""
+
+RETURN = r"""
+#Case_1: Site is successfully created/updated/deleted
+response_1:
+ description: A dictionary with API execution details as returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response":
+ {
+ "bapiExecutionId": String,
+ "bapiKey": String,
+ "bapiName": String,
+ "endTime": String,
+ "endTimeEpoch": 0,
+ "runtimeInstanceId": String,
+ "siteId": String,
+ "startTime": String,
+ "startTimeEpoch": 0,
+ "status": String,
+ "timeDuration": 0
+
+ },
+ "msg": "string"
+ }
+
+#Case_2: Site exists and does not need an update
+response_2:
+ description: A dictionary with existing site details.
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response":
+ {
+ "site": {},
+ "siteId": String,
+ "type": String
+ },
+ "msg": String
+ }
+
+#Case_3: Error while creating/updating/deleting site
+response_3:
+ description: A dictionary with API execution details as returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response":
+ {
+ "bapiError": String,
+ "bapiExecutionId": String,
+ "bapiKey": String,
+ "bapiName": String,
+ "endTime": String,
+ "endTimeEpoch": 0,
+ "runtimeInstanceId": String,
+ "startTime": String,
+ "startTimeEpoch": 0,
+ "status": String,
+ "timeDuration": 0
+
+ },
+ "msg": "string"
+ }
+
+#Case_4: Site not found when attempting to delete site
+response_4:
+ description: A list with the response returned by the Cisco Catalyst Center Python
+ returned: always
+ type: list
+ sample: >
+ {
+ "response": [],
+ "msg": String
+ }
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts,
+ get_dict_result,
+)
+
# Maps the RF-model attribute codes returned by the Catalyst Center
# 'get_site' API to the human-readable floor-plan names used in playbooks.
floor_plan = {
    '101101': 'Cubes And Walled Offices',
    '101102': 'Drywall Office Only',
    '101105': 'Free Space',
    '101104': 'Indoor High Ceiling',
    '101103': 'Outdoor Open Space'
}
+
+
+class Site(DnacBase):
+ """Class containing member attributes for Site workflow_manager module"""
+
    def __init__(self, module):
        """Initialize the Site workflow manager and declare the states it can apply."""
        super().__init__(module)
        self.supported_states = ["merged", "deleted"]
+
+ def validate_input(self):
+ """
+ Validate the fields provided in the playbook.
+ Checks the configuration provided in the playbook against a predefined specification
+ to ensure it adheres to the expected structure and data types.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of the 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call 'validate_input' on it.
+ If the validation succeeds, 'self.status' will be 'success' and 'self.validated_config'
+ will contain the validated configuration. If it fails, 'self.status' will be 'failed', and
+ 'self.msg' will describe the validation issues.
+ """
+
+ if not self.config:
+ self.status = "success"
+ self.msg = "Configuration is not available in the playbook for validation"
+ self.log(self.msg, "ERROR")
+ return self
+
+ temp_spec = dict(
+ type=dict(required=False, type='str'),
+ site=dict(required=True, type='dict'),
+ )
+ self.config = self.update_site_type_key(self.config)
+
+ # Validate site params
+ valid_temp, invalid_params = validate_list_of_dicts(
+ self.config, temp_spec
+ )
+
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format(
+ "\n".join(invalid_params)
+ )
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_temp
+ self.msg = "Successfully validated playbook config params: {0}".format(str(valid_temp))
+ self.log(self.msg, "INFO")
+ self.status = "success"
+
+ return self
+
+ def get_current_site(self, site):
+ """
+ Get the current site information.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - site (list): A list containing information about the site.
+ Returns:
+ - dict: A dictionary containing the extracted site information.
+ Description:
+ This method extracts information about the current site based on
+ the provided 'site' list. It determines the type of the site
+ (area, building, or floor) and retrieves specific details
+ accordingly. The resulting dictionary includes the type, site
+ details, and the site ID.
+ """
+
+ site_info = {}
+
+ location = get_dict_result(site[0].get("additionalInfo"), 'nameSpace', "Location")
+ typeinfo = location.get("attributes").get("type")
+
+ if typeinfo == "area":
+ site_info = dict(
+ area=dict(
+ name=site[0].get("name"),
+ parentName=site[0].get("siteNameHierarchy").split("/" + site[0].get("name"))[0]
+ )
+ )
+
+ elif typeinfo == "building":
+ site_info = dict(
+ building=dict(
+ name=site[0].get("name"),
+ parentName=site[0].get("siteNameHierarchy").split("/" + site[0].get("name"))[0],
+ address=location.get("attributes").get("address"),
+ latitude=location.get("attributes").get("latitude"),
+ longitude=location.get("attributes").get("longitude"),
+ country=location.get("attributes").get("country"),
+ )
+ )
+
+ elif typeinfo == "floor":
+ map_geometry = get_dict_result(site[0].get("additionalInfo"), 'nameSpace', "mapGeometry")
+ map_summary = get_dict_result(site[0].get("additionalInfo"), 'nameSpace', "mapsSummary")
+ rf_model = map_summary.get("attributes").get("rfModel")
+
+ site_info = dict(
+ floor=dict(
+ name=site[0].get("name"),
+ parentName=site[0].get("siteNameHierarchy").split("/" + site[0].get("name"))[0],
+ rf_model=floor_plan.get(rf_model),
+ width=map_geometry.get("attributes").get("width"),
+ length=map_geometry.get("attributes").get("length"),
+ height=map_geometry.get("attributes").get("height"),
+ floorNumber=map_geometry.get("attributes").get("floor_number", "")
+ )
+ )
+
+ current_site = dict(
+ type=typeinfo,
+ site=site_info,
+ siteId=site[0].get("id")
+ )
+
+ self.log("Current site details: {0}".format(str(current_site)), "INFO")
+
+ return current_site
+
+ def site_exists(self):
+ """
+ Check if the site exists in Cisco Catalyst Center.
+
+ Parameters:
+ - self (object): An instance of the class containing the method.
+ Returns:
+ - tuple: A tuple containing a boolean indicating whether the site exists and
+ a dictionary containing information about the existing site.
+ The returned tuple includes two elements:
+ - site_exists (bool): Indicates whether the site exists.
+ - dict: Contains information about the existing site. If the
+ site doesn't exist, this dictionary is empty.
+ Description:
+ Checks the existence of a site in Cisco Catalyst Center by querying the
+ 'get_site' function in the 'sites' family. It utilizes the
+ 'site_name' parameter from the 'want' attribute to identify the site.
+ """
+
+ site_exists = False
+ current_site = {}
+ response = None
+ try:
+ response = self.dnac._exec(
+ family="sites",
+ function='get_site',
+ params={"name": self.want.get("site_name")},
+ )
+
+ except Exception as e:
+ self.log("The provided site name '{0}' is either invalid or not present in the Cisco Catalyst Center."
+ .format(self.want.get("site_name")), "WARNING")
+ if response:
+ response = response.get("response")
+ self.log("Received API response from 'get_site': {0}".format(str(response)), "DEBUG")
+ current_site = self.get_current_site(response)
+ site_exists = True
+ self.log("Site '{0}' exists in Cisco Catalyst Center".format(self.want.get("site_name")), "INFO")
+
+ return (site_exists, current_site)
+
+ def get_site_params(self, params):
+ """
+ Store the site-related parameters.
+
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - params (dict): Dictionary containing site-related parameters.
+ Returns:
+ - dict: Dictionary containing the stored site-related parameters.
+ The returned dictionary includes the following keys:
+ - 'type' (str): The type of the site.
+ - 'site' (dict): Dictionary containing site-related info.
+ Description:
+ This method takes a dictionary 'params' containing site-related
+ information and stores the relevant parameters based on the site
+ type. If the site type is 'floor', it ensures that the 'rfModel'
+ parameter is stored in uppercase.
+ """
+ typeinfo = params.get("type")
+ site_info = {}
+
+ if typeinfo == 'area':
+ area_details = params.get('site').get('area')
+ site_info['area'] = {
+ 'name': area_details.get('name'),
+ 'parentName': area_details.get('parent_name')
+ }
+ elif typeinfo == 'building':
+ building_details = params.get('site').get('building')
+ site_info['building'] = {
+ 'name': building_details.get('name'),
+ 'address': building_details.get('address'),
+ 'parentName': building_details.get('parent_name'),
+ 'latitude': building_details.get('latitude'),
+ 'longitude': building_details.get('longitude'),
+ 'country': building_details.get('country')
+ }
+ else:
+ floor_details = params.get('site').get('floor')
+ site_info['floor'] = {
+ 'name': floor_details.get('name'),
+ 'parentName': floor_details.get('parent_name'),
+ 'length': floor_details.get('length'),
+ 'width': floor_details.get('width'),
+ 'height': floor_details.get('height'),
+ 'floorNumber': floor_details.get('floor_number', '')
+ }
+ try:
+ site_info["floor"]["rfModel"] = floor_details.get("rf_model")
+ except Exception as e:
+ self.log("The attribute 'rf_model' is missing in floor '{0}'.".format(floor_details.get('name')), "WARNING")
+
+ site_params = dict(
+ type=typeinfo,
+ site=site_info,
+ )
+ self.log("Site parameters: {0}".format(str(site_params)), "DEBUG")
+
+ return site_params
+
+ def get_site_name(self, site):
+ """
+ Get and Return the site name.
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - site (dict): A dictionary containing information about the site.
+ Returns:
+ - str: The constructed site name.
+ Description:
+ This method takes a dictionary 'site' containing information about
+ the site and constructs the site name by combining the parent name
+ and site name.
+ """
+
+ site_type = site.get("type")
+ parent_name = site.get("site").get(site_type).get("parent_name")
+ name = site.get("site").get(site_type).get("name")
+ site_name = '/'.join([parent_name, name])
+ self.log("Site name: {0}".format(site_name), "INFO")
+
+ return site_name
+
+ def compare_float_values(self, ele1, ele2, precision=2):
+ """
+ Compare two floating-point values with a specified precision.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - ele1 (float): The first floating-point value to be compared.
+ - ele2 (float): The second floating-point value to be compared.
+ - precision (int, optional): The number of decimal places to consider in the comparison, Defaults to 2.
+ Return:
+ bool: True if the rounded values are equal within the specified precision, False otherwise.
+ Description:
+ This method compares two floating-point values, ele1 and ele2, by rounding them
+ to the specified precision and checking if the rounded values are equal. It returns
+ True if the rounded values are equal within the specified precision, and False otherwise.
+ """
+
+ return round(float(ele1), precision) == round(float(ele2), precision)
+
+ def is_area_updated(self, updated_site, requested_site):
+ """
+ Check if the area site details have been updated.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - updated_site (dict): The site details after the update.
+ - requested_site (dict): The site details as requested for the update.
+ Return:
+ bool: True if the area details (name and parent name) have been updated, False otherwise.
+ Description:
+ This method compares the area details (name and parent name) of the updated site
+ with the requested site and returns True if they are equal, indicating that the area
+ details have been updated. Returns False if there is a mismatch in the area site details.
+ """
+
+ return (
+ updated_site['name'] == requested_site['name'] and
+ updated_site['parentName'] == requested_site['parentName']
+ )
+
+ def is_building_updated(self, updated_site, requested_site):
+ """
+ Check if the building details in a site have been updated.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - updated_site (dict): The site details after the update.
+ - requested_site (dict): The site details as requested for the update.
+ Return:
+ bool: True if the building details have been updated, False otherwise.
+ Description:
+ This method compares the building details of the updated site with the requested site.
+ It checks if the name, parent_name, latitude, longitude, and address (if provided) are
+ equal, indicating that the building details have been updated. Returns True if the
+ details match, and False otherwise.
+ """
+
+ return (
+ updated_site['name'] == requested_site['name'] and
+ updated_site['parentName'] == requested_site['parentName'] and
+ self.compare_float_values(updated_site['latitude'], requested_site['latitude']) and
+ self.compare_float_values(updated_site['longitude'], requested_site['longitude']) and
+ ('address' in requested_site and (requested_site['address'] is None or updated_site.get('address') == requested_site['address']))
+ )
+
+ def is_floor_updated(self, updated_site, requested_site):
+ """
+ Check if the floor details in a site have been updated.
+
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - updated_site (dict): The site details after the update.
+ - requested_site (dict): The site details as requested for the update.
+ Return:
+ bool: True if the floor details have been updated, False otherwise.
+ Description:
+ This method compares the floor details of the updated site with the requested site.
+ It checks if the name, rf_model, length, width, and height are equal, indicating
+ that the floor details have been updated. Returns True if the details match, and False otherwise.
+ """
+
+ keys_to_compare = ['length', 'width', 'height']
+ if updated_site['name'] != requested_site['name'] or updated_site['rf_model'] != requested_site['rfModel']:
+ return False
+
+ for key in keys_to_compare:
+ if not self.compare_float_values(updated_site[key], requested_site[key]):
+ return False
+
+ return True
+
+ def site_requires_update(self):
+ """
+ Check if the site requires updates.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ bool: True if the site requires updates, False otherwise.
+ Description:
+ This method compares the site parameters of the current site
+ ('current_site') and the requested site parameters ('requested_site')
+ stored in the 'want' attribute. It checks for differences in
+ specified parameters, such as the site type and site details.
+ """
+
+ type = self.have['current_site']['type']
+ updated_site = self.have['current_site']['site'][type]
+ requested_site = self.want['site_params']['site'][type]
+ self.log("Current Site type: {0}".format(str(updated_site)), "INFO")
+ self.log("Requested Site type: {0}".format(str(requested_site)), "INFO")
+
+ if type == "building":
+ return not self.is_building_updated(updated_site, requested_site)
+
+ elif type == "floor":
+ return not self.is_floor_updated(updated_site, requested_site)
+
+ return not self.is_area_updated(updated_site, requested_site)
+
+ def get_have(self, config):
+ """
+ Get the site details from Cisco Catalyst Center
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): A dictionary containing the configuration details.
+ Returns:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method queries Cisco Catalyst Center to check if a specified site
+ exists. If the site exists, it retrieves details about the current
+ site, including the site ID and other relevant information. The
+ results are stored in the 'have' attribute for later reference.
+ """
+
+ site_exists = False
+ current_site = None
+ have = {}
+
+ # check if given site exits, if exists store current site info
+ (site_exists, current_site) = self.site_exists()
+
+ self.log("Current Site details (have): {0}".format(str(current_site)), "DEBUG")
+
+ if site_exists:
+ have["site_id"] = current_site.get("siteId")
+ have["site_exists"] = site_exists
+ have["current_site"] = current_site
+
+ self.have = have
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+
+ return self
+
+ def get_want(self, config):
+ """
+ Get all site-related information from the playbook needed for creation/updation/deletion of site in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): A dictionary containing configuration information.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ Retrieves all site-related information from playbook that is
+ required for creating a site in Cisco Catalyst Center. It includes
+ parameters such as 'site_params' and 'site_name.' The gathered
+ information is stored in the 'want' attribute for later reference.
+ """
+
+ want = {}
+ want = dict(
+ site_params=self.get_site_params(config),
+ site_name=self.get_site_name(config),
+ )
+ self.want = want
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ return self
+
+ def get_diff_merged(self, config):
+ """
+ Update/Create site information in Cisco Catalyst Center with fields
+ provided in the playbook.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): A dictionary containing configuration information.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method determines whether to update or create a site in Cisco Catalyst Center based on the provided
+ configuration information. If the specified site exists, the method checks if it requires an update
+ by calling the 'site_requires_update' method. If an update is required, it calls the 'update_site'
+ function from the 'sites' family of the Cisco Catalyst Center API. If the site does not require an update,
+ the method exits, indicating that the site is up to date.
+ """
+
+ site_updated = False
+ site_created = False
+
+ # check if the given site exists and/or needs to be updated/created.
+ if self.have.get("site_exists"):
+ if self.site_requires_update():
+ # Existing Site requires update
+ site_params = self.want.get("site_params")
+ site_params["site_id"] = self.have.get("site_id")
+
+ response = self.dnac._exec(
+ family="sites",
+ function='update_site',
+ op_modifies=True,
+ params=site_params,
+ )
+ site_updated = True
+
+ else:
+ # Site does not neet update
+ self.result['response'] = self.have.get("current_site")
+ self.msg = "Site - {0} does not need any update".format(self.have.get("current_site"))
+ self.log(self.msg, "INFO")
+ self.result['msg'] = self.msg
+ return self
+
+ else:
+ # Creating New Site
+ site_params = self.want.get("site_params")
+ if site_params['site']['building']:
+ building_details = {}
+ for key, value in site_params['site']['building'].items():
+ if value is not None:
+ building_details[key] = value
+ site_params['site']['building'] = building_details
+
+ response = self.dnac._exec(
+ family="sites",
+ function='create_site',
+ op_modifies=True,
+ params=site_params,
+ )
+ self.log("Received API response from 'create_site': {0}".format(str(response)), "DEBUG")
+ site_created = True
+
+ if site_created or site_updated:
+ if response and isinstance(response, dict):
+ executionid = response.get("executionId")
+ while True:
+ execution_details = self.get_execution_details(executionid)
+ if execution_details.get("status") == "SUCCESS":
+ self.result['changed'] = True
+ self.result['response'] = execution_details
+ break
+
+ elif execution_details.get("bapiError"):
+ self.module.fail_json(msg=execution_details.get("bapiError"),
+ response=execution_details)
+ break
+
+ if site_updated:
+ self.msg = "Site - {0} Updated Successfully".format(self.want.get("site_name"))
+ self.log(self.msg, "INFO")
+ self.result['msg'] = self.msg
+ self.result['response'].update({"siteId": self.have.get("site_id")})
+
+ else:
+ # Get the site id of the newly created site.
+ (site_exists, current_site) = self.site_exists()
+
+ if site_exists:
+ self.msg = "Site '{0}' created successfully".format(self.want.get("site_name"))
+ self.log(self.msg, "INFO")
+ self.log("Current site (have): {0}".format(str(current_site)), "DEBUG")
+ self.result['msg'] = self.msg
+ self.result['response'].update({"siteId": current_site.get('site_id')})
+
+ return self
+
+ def delete_single_site(self, site_id, site_name):
+        """
+ Delete a single site in the Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ site_id (str): The ID of the site to be deleted.
+ site_name (str): The name of the site to be deleted.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This function initiates the deletion of a site in the Cisco Catalyst Center by calling the delete API.
+ If the deletion is successful, the result is marked as changed, and the status is set to "success."
+ If an error occurs during the deletion process, the status is set to "failed," and the log contains
+ details about the error.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="sites",
+ function="delete_site",
+ params={"site_id": site_id},
+ )
+
+ if response and isinstance(response, dict):
+ self.log("Received API response from 'delete_site': {0}".format(str(response)), "DEBUG")
+ executionid = response.get("executionId")
+
+ while True:
+ execution_details = self.get_execution_details(executionid)
+ if execution_details.get("status") == "SUCCESS":
+ self.msg = "Site '{0}' deleted successfully".format(site_name)
+ self.result['changed'] = True
+ self.result['response'] = self.msg
+ self.status = "success"
+ self.log(self.msg, "INFO")
+ break
+ elif execution_details.get("bapiError"):
+ self.log("Error response for 'delete_site' execution: {0}".format(execution_details.get("bapiError")), "ERROR")
+ self.module.fail_json(msg=execution_details.get("bapiError"), response=execution_details)
+ break
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Exception occurred while deleting site '{0}' due to: {1}".format(site_name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def get_diff_deleted(self, config):
+ """
+ Call Cisco Catalyst Center API to delete sites with provided inputs.
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): Dictionary containing information for site deletion.
+ Returns:
+ - self: The result dictionary includes the following keys:
+ - 'changed' (bool): Indicates whether changes were made
+ during the deletion process.
+ - 'response' (dict): Contains details about the execution
+ and the deleted site ID.
+ - 'msg' (str): A message indicating the status of the deletion operation.
+ Description:
+ This method initiates the deletion of a site by calling the 'delete_site' function in the 'sites' family
+ of the Cisco Catalyst Center API. It uses the site ID obtained from the 'have' attribute.
+ """
+
+ site_exists = self.have.get("site_exists")
+ site_name = self.want.get("site_name")
+ if not site_exists:
+ self.status = "success"
+ self.msg = "Unable to delete site '{0}' as it's not found in Cisco Catalyst Center".format(site_name)
+ self.result.update({'changed': False,
+ 'response': self.msg,
+ 'msg': self.msg})
+ self.log(self.msg, "INFO")
+
+ return self
+
+        # Check whether the site has child sites; if so, fetch them using the get membership API, sort them
+        # in reverse order and delete them from the bottom of the hierarchy up to the top
+ site_id = self.have.get("site_id")
+ mem_response = self.dnac._exec(
+ family="sites",
+ function="get_membership",
+ params={"site_id": site_id},
+ )
+ site_response = mem_response.get("site").get("response")
+ self.log("Site {0} response along with it's child sites: {1}".format(site_name, str(site_response)), "DEBUG")
+
+ if len(site_response) == 0:
+ self.delete_single_site(site_id, site_name)
+ return self
+
+ # Sorting the response in reverse order based on hierarchy levels
+ sorted_site_resp = sorted(site_response, key=lambda x: x.get("groupHierarchy"), reverse=True)
+
+ # Deleting each level in reverse order till topmost parent site
+ for item in sorted_site_resp:
+ self.delete_single_site(item['id'], item['name'])
+
+ # Delete the final parent site
+ self.delete_single_site(site_id, site_name)
+ self.msg = "The site '{0}' and its child sites have been deleted successfully".format(site_name)
+ self.result['response'] = self.msg
+ self.log(self.msg, "INFO")
+
+ return self
+
+ def verify_diff_merged(self, config):
+ """
+        Verify the merged status (Creation/Update) of site configuration in Cisco Catalyst Center.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the merged status of a configuration in Cisco Catalyst Center by retrieving the current state
+ (have) and desired state (want) of the configuration, logs the states, and validates whether the specified
+ site exists in the Catalyst Center configuration.
+ """
+
+ self.get_have(config)
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ # Code to validate ccc config for merged state
+ site_exist = self.have.get("site_exists")
+ site_name = self.want.get("site_name")
+
+ if site_exist:
+ self.status = "success"
+ self.msg = "The requested site '{0}' is present in the Cisco Catalyst Center and its creation has been verified.".format(site_name)
+ self.log(self.msg, "INFO")
+
+ require_update = self.site_requires_update()
+
+ if not require_update:
+ self.log("The update for site '{0}' has been successfully verified.".format(site_name), "INFO")
+ self. status = "success"
+ return self
+
+ self.log("""The playbook input for site '{0}' does not align with the Cisco Catalyst Center, indicating that the merge task
+ may not have executed successfully.""".format(site_name), "INFO")
+
+ return self
+
+ def verify_diff_deleted(self, config):
+ """
+ Verify the deletion status of site configuration in Cisco Catalyst Center.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the deletion status of a configuration in Cisco Catalyst Center.
+ It validates whether the specified site exists in the Catalyst Center configuration.
+ """
+
+ self.get_have(config)
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ # Code to validate ccc config for delete state
+ site_exist = self.have.get("site_exists")
+
+ if not site_exist:
+ self.status = "success"
+ msg = """The requested site '{0}' has already been deleted from the Cisco Catalyst Center and this has been
+ successfully verified.""".format(self.want.get("site_name"))
+ self.log(msg, "INFO")
+ return self
+ self.log("""Mismatch between the playbook input for site '{0}' and the Cisco Catalyst Center indicates that
+ the deletion was not executed successfully.""".format(self.want.get("site_name")), "INFO")
+
+ return self
+
+
+def main():
+ """ main entry point for module execution
+ """
+
+ element_spec = {'dnac_host': {'required': True, 'type': 'str'},
+ 'dnac_port': {'type': 'str', 'default': '443'},
+ 'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
+ 'dnac_password': {'type': 'str', 'no_log': True},
+ 'dnac_verify': {'type': 'bool', 'default': 'True'},
+ 'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
+ 'dnac_debug': {'type': 'bool', 'default': False},
+ 'dnac_log_level': {'type': 'str', 'default': 'WARNING'},
+ "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
+ "dnac_log_append": {"type": 'bool', "default": True},
+ 'dnac_log': {'type': 'bool', 'default': False},
+ 'validate_response_schema': {'type': 'bool', 'default': True},
+ 'config_verify': {'type': 'bool', "default": False},
+ 'dnac_api_task_timeout': {'type': 'int', "default": 1200},
+ 'dnac_task_poll_interval': {'type': 'int', "default": 2},
+ 'config': {'required': True, 'type': 'list', 'elements': 'dict'},
+ 'state': {'default': 'merged', 'choices': ['merged', 'deleted']}
+ }
+
+ module = AnsibleModule(argument_spec=element_spec,
+ supports_check_mode=False)
+
+ ccc_site = Site(module)
+ state = ccc_site.params.get("state")
+
+ if state not in ccc_site.supported_states:
+ ccc_site.status = "invalid"
+ ccc_site.msg = "State {0} is invalid".format(state)
+ ccc_site.check_return_status()
+
+ ccc_site.validate_input().check_return_status()
+ config_verify = ccc_site.params.get("config_verify")
+
+ for config in ccc_site.validated_config:
+ ccc_site.reset_values()
+ ccc_site.get_want(config).check_return_status()
+ ccc_site.get_have(config).check_return_status()
+ ccc_site.get_diff_state_apply[state](config).check_return_status()
+ if config_verify:
+ ccc_site.verify_diff_state_apply[state](config).check_return_status()
+
+ module.exit_json(**ccc_site.result)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/cisco/dnac/plugins/modules/swim_intent.py b/ansible_collections/cisco/dnac/plugins/modules/swim_intent.py
index ca173fb44..08f78ac30 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/swim_intent.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/swim_intent.py
@@ -7,7 +7,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-__author__ = ("Madhan Sankaranarayanan, Rishita Chowdhary")
+__author__ = ("Madhan Sankaranarayanan, Rishita Chowdhary, Abhishek Maheshwari")
DOCUMENTATION = r"""
---
@@ -15,53 +15,85 @@ module: swim_intent
short_description: Intent module for SWIM related functions
description:
- Manage operation related to image importation, distribution, activation and tagging image as golden
-- API to fetch a software image from remote file system using URL for HTTP/FTP and upload it to DNA Center.
+- API to fetch a software image from remote file system using URL for HTTP/FTP and upload it to Catalyst Center.
Supported image files extensions are bin, img, tar, smu, pie, aes, iso, ova, tar_gz and qcow2.
-- API to fetch a software image from local file system and upload it to DNA Center
+- API to fetch a software image from local file system and upload it to Catalyst Center
Supported image files extensions are bin, img, tar, smu, pie, aes, iso, ova, tar_gz and qcow2.
- API to tag/untag image as golen for a given family of devices
- API to distribute a software image on a given device. Software image must be imported successfully into
- DNA Center before it can be distributed.
+ Catalyst Center before it can be distributed.
- API to activate a software image on a given device. Software image must be present in the device flash.
version_added: '6.6.0'
extends_documentation_fragment:
- cisco.dnac.intent_params
author: Madhan Sankaranarayanan (@madhansansel)
Rishita Chowdhary (@rishitachowdhary)
+ Abhishek Maheshwari (@abmahesh)
options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Catalyst Center after module completion.
+ type: str
+ choices: [ merged ]
+ default: merged
config:
description: List of details of SWIM image being managed
type: list
elements: dict
required: True
suboptions:
- importImageDetails:
+ import_image_details:
description: Details of image being imported
type: dict
suboptions:
type:
- description: The source of import, supports url import or local import.
+ description: Specifies the import source, supporting local file import (local) or remote url import (remote).
type: str
- localImageDetails:
+ local_image_details:
description: Details of the local path of the image to be imported.
type: dict
suboptions:
- filePath:
- description: File absolute path.
+ file_path:
+ description: Provide the absolute file path needed to import an image from your local system (Eg "/path/to/your/file").
+            Accepted file formats are - .gz,.bin,.img,.tar,.smu,.pie,.aes,.iso,.ova,.tar_gz,.qcow2,.nfvispkg,.zip,.spa,.rpm.
type: str
- isThirdParty:
- description: IsThirdParty query parameter. Third party Image check.
+ is_third_party:
+ description: Query parameter to determine if the image is from a third party (optional).
type: bool
- thirdPartyApplicationType:
- description: ThirdPartyApplicationType query parameter. Third Party Application Type.
+ third_party_application_type:
+ description: Specify the ThirdPartyApplicationType query parameter to indicate the type of third-party application. Allowed
+ values include WLC, LINUX, FIREWALL, WINDOWS, LOADBALANCER, THIRDPARTY, etc.(optional).
+ WLC (Wireless LAN Controller) - It's a network device that manages and controls multiple wireless access points (APs) in a
+ centralized manner.
+ LINUX - It's an open-source operating system that provides a complete set of software packages and utilities.
+ FIREWALL - It's a network security device that monitors and controls incoming and outgoing network traffic based on
+ predetermined security rules.It acts as a barrier between a trusted internal network and untrusted external networks
+ (such as the internet), preventing unauthorized access.
+ WINDOWS - It's an operating system known for its graphical user interface (GUI) support, extensive compatibility with hardware
+ and software, and widespread use across various applications.
+ LOADBALANCER - It's a network device or software application that distributes incoming network traffic across multiple servers
+ or resources.
+ THIRDPARTY - It refers to third-party images or applications that are not part of the core system.
+ NAM (Network Access Manager) - It's a network management tool or software application that provides centralized control and
+ monitoring of network access policies, user authentication, and device compliance.
+ WAN Optimization - It refers to techniques and technologies used to improve the performance and efficiency of WANs. It includes
+ various optimization techniques such as data compression, caching, protocol optimization, and traffic prioritization to reduce
+ latency, increase throughput, and improve user experience over WAN connections.
+ Unknown - It refers to an unspecified or unrecognized application type.
+ Router - It's a network device that forwards data packets between computer networks. They are essential for connecting multiple
+ networks together and directing traffic between them.
type: str
- thirdPartyImageFamily:
- description: ThirdPartyImageFamily query parameter. Third Party image family.
+ third_party_image_family:
+ description: Provide the ThirdPartyImageFamily query parameter to identify the family of the third-party image. Image Family name
+ like PALOALTO, RIVERBED, FORTINET, CHECKPOINT, SILVERPEAK etc. (optional).
type: str
- thirdPartyVendor:
- description: ThirdPartyVendor query parameter. Third Party Vendor.
+ third_party_vendor:
+ description: Include the ThirdPartyVendor query parameter to specify the vendor of the third party.
type: str
- urlDetails:
+ url_details:
description: URL details for SWIM import
type: dict
suboptions:
@@ -70,108 +102,210 @@ options:
type: list
elements: dict
suboptions:
- applicationType:
- description: Swim Import Via Url's applicationType.
+ application_type:
+ description: An optional parameter that specifies the type of application. Allowed values include WLC, LINUX, FIREWALL, WINDOWS,
+ LOADBALANCER, THIRDPARTY, etc. This is only applicable for third-party image types(optional).
+ WLC (Wireless LAN Controller) - It's network device that manages and controls multiple wireless access points (APs) in a
+ centralized manner.
+ LINUX - It's an open source which provide complete operating system with a wide range of software packages and utilities.
+ FIREWALL - It's a network security device that monitors and controls incoming and outgoing network traffic based on
+ predetermined security rules.It acts as a barrier between a trusted internal network and untrusted external networks
+ (such as the internet), preventing unauthorized access.
+ WINDOWS - It's an OS which provides GUI support for various applications, and extensive compatibility with hardware
+ and software.
+ LOADBALANCER - It's a network device or software application that distributes incoming network traffic across multiple servers
+ or resources.
+ THIRDPARTY - It refers to third-party images or applications that are not part of the core system.
+ NAM (Network Access Manager) - It's a network management tool or software application that provides centralized control and
+ monitoring of network access policies, user authentication, and device compliance.
+ WAN Optimization - It refers to techniques and technologies used to improve the performance and efficiency of WANs. It includes
+ various optimization techniques such as data compression, caching, protocol optimization, and traffic prioritization to reduce
+ latency, increase throughput, and improve user experience over WAN connections.
+ Unknown - It refers to an unspecified or unrecognized application type.
+ Router - It's a network device that forwards data packets between computer networks. They are essential for connecting multiple
+ networks together and directing traffic between them.
type: str
- imageFamily:
- description: Swim Import Via Url's imageFamily.
+ image_family:
+ description: Represents the name of the image family and is applicable only when uploading third-party images. Image Family name
+ like PALOALTO, RIVERBED, FORTINET, CHECKPOINT, SILVERPEAK etc. (optional).
type: str
- sourceURL:
- description: Swim Import Via Url's sourceURL.
+ source_url:
+ description: A mandatory parameter for importing a SWIM image via a remote URL. This parameter is required when using a URL
+ to import an image.(For example, http://{host}/swim/cat9k_isoxe.16.12.10s.SPA.bin,
+ ftp://user:password@{host}/swim/cat9k_isoxe.16.12.10s.SPA.iso)
type: str
- thirdParty:
- description: ThirdParty flag.
+ is_third_party:
+ description: Flag indicates whether the image is uploaded from a third party (optional).
type: bool
vendor:
- description: Swim Import Via Url's vendor.
+ description: The name of the vendor, that applies only to third-party image types when importing via URL (optional).
type: str
- scheduleAt:
+ schedule_at:
description: ScheduleAt query parameter. Epoch Time (The number of milli-seconds since
- January 1 1970 UTC) at which the distribution should be scheduled (Optional).
+ January 1 1970 UTC) at which the distribution should be scheduled (optional).
type: str
- scheduleDesc:
- description: ScheduleDesc query parameter. Custom Description (Optional).
+ schedule_desc:
+ description: ScheduleDesc query parameter. Custom Description (optional).
type: str
- scheduleOrigin:
- description: ScheduleOrigin query parameter. Originator of this call (Optional).
+ schedule_origin:
+ description: ScheduleOrigin query parameter. Originator of this call (optional).
type: str
- taggingDetails:
+ tagging_details:
description: Details for tagging or untagging an image as golden
type: dict
suboptions:
- imageName:
+ image_name:
description: SWIM image name which will be tagged or untagged as golden.
type: str
- deviceRole:
- description: Device Role. Permissible Values ALL, UNKNOWN, ACCESS, BORDER ROUTER,
- DISTRIBUTION and CORE.
+ device_role:
+ description: Defines the device role, with permissible values including ALL, UNKNOWN, ACCESS, BORDER ROUTER,
+ DISTRIBUTION, and CORE.
+ ALL - This role typically represents all devices within the network, regardless of their specific roles or functions.
+          UNKNOWN - This role is assigned to devices whose roles or functions have not been identified or classified within Cisco Catalyst Center.
+ This could happen if the platform is unable to determine the device's role based on available information.
+ ACCESS - This role typically represents switches or access points that serve as access points for end-user devices to connect to the network.
+ These devices are often located at the edge of the network and provide connectivity to end-user devices.
+ BORDER ROUTER - These are devices that connect different network domains or segments together. They often serve as
+ gateways between different networks, such as connecting an enterprise network to the internet or connecting
+ multiple branch offices.
+ DISTRIBUTION - This role represents function as distribution switches or routers in hierarchical network designs. They aggregate traffic
+ from access switches and route it toward the core of the network or toward other distribution switches.
+ CORE - This role typically represents high-capacity switches or routers that form the backbone of the network. They handle large volumes
+ of traffic and provide connectivity between different parts of network, such as connecting distribution switches or
+ providing interconnection between different network segments.
type: str
- deviceFamilyName:
- description: Device family name
+ device_image_family_name:
+ description: Device Image family name(Eg Cisco Catalyst 9300 Switch)
type: str
- siteName:
+ site_name:
description: Site name for which SWIM image will be tagged/untagged as golden.
If not provided, SWIM image will be mapped to global site.
type: str
+ device_series_name:
+ description: This parameter specifies the name of the device series. It is used to identify a specific series of devices,
+ such as Cisco Catalyst 9300 Series Switches, within the Cisco Catalyst Center.
+ type: str
+ version_added: 6.12.0
tagging:
description: Booelan value to tag/untag SWIM image as golden
If True then the given image will be tagged as golden.
If False then the given image will be un-tagged as golden.
type: bool
- imageDistributionDetails:
+ image_distribution_details:
description: Details for SWIM image distribution. Device on which the image needs to distributed
can be speciifed using any of the following parameters - deviceSerialNumber,
deviceIPAddress, deviceHostname or deviceMacAddress.
type: dict
suboptions:
- imageName:
+ device_role:
+ description: Defines the device role, with permissible values including ALL, UNKNOWN, ACCESS, BORDER ROUTER,
+ DISTRIBUTION, and CORE.
+ ALL - This role typically represents all devices within the network, regardless of their specific roles or functions.
+        UNKNOWN - This role is assigned to devices whose roles or functions have not been identified or classified within Cisco Catalyst Center.
+ This could happen if the platform is unable to determine the device's role based on available information.
+ ACCESS - This role typically represents switches or access points that serve as access points for end-user devices to connect to the network.
+ These devices are often located at the edge of the network and provide connectivity to end-user devices.
+ BORDER ROUTER - These are devices that connect different network domains or segments together. They often serve as
+ gateways between different networks, such as connecting an enterprise network to the internet or connecting
+ multiple branch offices.
+ DISTRIBUTION - This role represents function as distribution switches or routers in hierarchical network designs. They aggregate traffic
+ from access switches and route it toward the core of the network or toward other distribution switches.
+ CORE - This role typically represents high-capacity switches or routers that form the backbone of the network. They handle large volumes
+ of traffic and provide connectivity between different parts of network, such as connecting distribution switches or
+ providing interconnection between different network segments.
+ type: str
+ device_family_name:
+ description: Specify the name of the device family such as Switches and Hubs, etc.
+ type: str
+ site_name:
+ description: Used to get device details associated to this site.
+ type: str
+ device_series_name:
+ description: This parameter specifies the name of the device series. It is used to identify a specific series of devices,
+ such as Cisco Catalyst 9300 Series Switches, within the Cisco Catalyst Center.
+ type: str
+ version_added: 6.12.0
+ image_name:
description: SWIM image's name
type: str
- deviceSerialNumber:
+ device_serial_number:
description: Device serial number where the image needs to be distributed
type: str
- deviceIPAddress:
+ device_ip_address:
description: Device IP address where the image needs to be distributed
type: str
- deviceHostname:
+ device_hostname:
description: Device hostname where the image needs to be distributed
type: str
- deviceMacAddress:
+ device_mac_address:
description: Device MAC address where the image needs to be distributed
type: str
- imageActivationDetails:
+ image_activation_details:
description: Details for SWIM image activation. Device on which the image needs to activated
can be speciifed using any of the following parameters - deviceSerialNumber,
deviceIPAddress, deviceHostname or deviceMacAddress.
type: dict
suboptions:
- activateLowerImageVersion:
+ device_role:
+ description: Defines the device role, with permissible values including ALL, UNKNOWN, ACCESS, BORDER ROUTER,
+ DISTRIBUTION, and CORE.
+ ALL - This role typically represents all devices within the network, regardless of their specific roles or functions.
+        UNKNOWN - This role is assigned to devices whose roles or functions have not been identified or classified within Cisco Catalyst Center.
+ This could happen if the platform is unable to determine the device's role based on available information.
+ ACCESS - This role typically represents switches or access points that serve as access points for end-user devices to connect to the network.
+ These devices are often located at the edge of the network and provide connectivity to end-user devices.
+ BORDER ROUTER - These are devices that connect different network domains or segments together. They often serve as
+ gateways between different networks, such as connecting an enterprise network to the internet or connecting
+ multiple branch offices.
+ DISTRIBUTION - This role represents function as distribution switches or routers in hierarchical network designs. They aggregate traffic
+ from access switches and route it toward the core of the network or toward other distribution switches.
+ CORE - This role typically represents high-capacity switches or routers that form the backbone of the network. They handle large volumes
+ of traffic and provide connectivity between different parts of network, such as connecting distribution switches or
+ providing interconnection between different network segments.
+ type: str
+ device_family_name:
+ description: Specify the name of the device family such as Switches and Hubs, etc.
+ type: str
+ site_name:
+ description: Used to get device details associated to this site.
+ type: str
+ activate_lower_image_version:
description: ActivateLowerImageVersion flag.
type: bool
- deviceUpgradeMode:
- description: Swim Trigger Activation's deviceUpgradeMode.
+ device_upgrade_mode:
+ description: It specifies the mode of upgrade to be applied to the devices having the following values - 'install', 'bundle', and 'currentlyExists'.
+ install - This mode instructs Cisco Catalyst Center to perform a clean installation of the new image on the target devices.
+ When this mode is selected, the existing image on the device is completely replaced with the new image during the upgrade process.
+ This ensures that the device runs only the new image version after the upgrade is completed.
+          bundle - This mode instructs Cisco Catalyst Center to bundle the new image with the existing image on the device before initiating
+ the upgrade process. This mode allows for a more efficient upgrade process by preserving the existing image on the device while
+ adding the new image as an additional bundle. After the upgrade, the device can run either the existing image or the new bundled
+ image, depending on the configuration.
+          currentlyExists - This mode instructs Cisco Catalyst Center to check if the target devices already have the desired image version
+          installed. If the image is already present on the devices, no action is taken and the upgrade process is skipped for those devices. This mode
+ is useful for avoiding unnecessary upgrades on devices that already have the correct image version installed, thereby saving time.
type: str
- distributeIfNeeded:
- description: DistributeIfNeeded flag.
+ distribute_if_needed:
+ description: Enable the distribute_if_needed option when activating the SWIM image.
type: bool
- imageName:
+ image_name:
description: SWIM image's name
type: str
- deviceSerialNumber:
+ device_serial_number:
description: Device serial number where the image needs to be activated
type: str
- deviceIPAddress:
+ device_ip_address:
description: Device IP address where the image needs to be activated
type: str
- deviceHostname:
+ device_hostname:
description: Device hostname where the image needs to be activated
type: str
- deviceMacAddress:
+ device_mac_address:
description: Device MAC address where the image needs to be activated
type: str
- scheduleValidate:
+ schedule_validate:
description: ScheduleValidate query parameter. ScheduleValidate, validates data
- before schedule (Optional).
+ before schedule (optional).
type: bool
requirements:
- dnacentersdk == 2.4.5
@@ -189,6 +323,8 @@ notes:
post /dna/intent/api/v1/image/distribution,
post /dna/intent/api/v1/image/activation/device,
+ - Added the parameter 'dnac_api_task_timeout', 'dnac_task_poll_interval' options in v6.13.2.
+
"""
EXAMPLES = r"""
@@ -201,41 +337,140 @@ EXAMPLES = r"""
dnac_port: "{{dnac_port}}"
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
dnac_log: True
config:
- - importImageDetails:
- type: string
- urlDetails:
+ - import_image_details:
+ type: remote
+ url_details:
payload:
- - sourceURL: string
- isThirdParty: bool
- imageFamily: string
- vendor: string
- applicationType: string
- scheduleAt: string
- scheduleDesc: string
- scheduleOrigin: string
- taggingDetails:
- imageName: string
- deviceRole: string
- deviceFamilyName: string
- siteName: string
- tagging: bool
- imageDistributionDetails:
- imageName: string
- deviceSerialNumber: string
- imageActivationDetails:
- scheduleValidate: bool
- activateLowerImageVersion: bool
- distributeIfNeeded: bool
- deviceSerialNumber: string
- imageName: string
+ - source_url: "http://10.10.10.10/stda/cat9k_iosxe.17.12.01.SPA.bin"
+ is_third_party: False
+ tagging_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ device_role: ACCESS
+ device_image_family_name: Cisco Catalyst 9300 Switch
+ site_name: Global/USA/San Francisco/BGL_18
+ tagging: True
+ image_distribution_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ device_serial_number: FJC2327U0S2
+ image_activation_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ schedule_validate: False
+ activate_lower_image_version: False
+ distribute_if_needed: True
+ device_serial_number: FJC2327U0S2
+
+- name: Import an image from local, tag it as golden.
+ cisco.dnac.swim_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ config:
+ - import_image_details:
+ type: local
+ local_image_details:
+ file_path: /Users/Downloads/cat9k_iosxe.17.12.01.SPA.bin
+ is_third_party: False
+ tagging_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ device_role: ACCESS
+ device_image_family_name: Cisco Catalyst 9300 Switch
+ site_name: Global/USA/San Francisco/BGL_18
+ tagging: True
+
+- name: Tag the given image as golden and load it on device
+ cisco.dnac.swim_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ config:
+ - tagging_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ device_role: ACCESS
+ device_image_family_name: Cisco Catalyst 9300 Switch
+ site_name: Global/USA/San Francisco/BGL_18
+ tagging: True
+
+- name: Un-tag the given image as golden and load it on device
+ cisco.dnac.swim_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ config:
+ - tagging_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ device_role: ACCESS
+ device_image_family_name: Cisco Catalyst 9300 Switch
+ site_name: Global/USA/San Francisco/BGL_18
+ tagging: False
+
+- name: Distribute the given image on devices associated to that site with specified role.
+ cisco.dnac.swim_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ config:
+ - image_distribution_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ site_name: Global/USA/San Francisco/BGL_18
+ device_role: ALL
+ device_family_name: Switches and Hubs
+ device_series_name: Cisco Catalyst 9300 Series Switches
+
+- name: Activate the given image on devices associated to that site with specified role.
+ cisco.dnac.swim_intent:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ config:
+ - image_activation_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ site_name: Global/USA/San Francisco/BGL_18
+ device_role: ALL
+ device_family_name: Switches and Hubs
+ device_series_name: Cisco Catalyst 9300 Series Switches
+ schedule_validate: False
+ activate_lower_image_version: True
+ distribute_if_needed: True
+
"""
RETURN = r"""
#Case: SWIM image is successfully imported, tagged as golden, distributed and activated on a device
response:
- description: A dictionary with activation details as returned by the DNAC Python SDK
+ description: A dictionary with activation details as returned by the Catalyst Center Python SDK
returned: always
type: dict
sample: >
@@ -259,85 +494,88 @@ response:
"""
-import copy
from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
- DNACSDK,
+ DnacBase,
validate_list_of_dicts,
- log,
get_dict_result,
)
from ansible.module_utils.basic import AnsibleModule
+import os
+import time
-class DnacSwims:
+class DnacSwims(DnacBase):
+ """Class containing member attributes for Swim intent module"""
def __init__(self, module):
- self.module = module
- self.params = module.params
- self.config = copy.deepcopy(module.params.get("config"))
- self.have = {}
- self.want_create = {}
- self.diff_create = []
- self.validated = []
- dnac_params = self.get_dnac_params(self.params)
- log(str(dnac_params))
- self.dnac = DNACSDK(params=dnac_params)
- self.log = dnac_params.get("dnac_log")
-
- self.result = dict(changed=False, diff=[], response=[], warnings=[])
+ super().__init__(module)
+ self.supported_states = ["merged"]
def validate_input(self):
+ """
+ Validate the fields provided in the playbook.
+ Checks the configuration provided in the playbook against a predefined specification
+ to ensure it adheres to the expected structure and data types.
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call 'validate_input' on it.
+ If the validation succeeds, 'self.status' will be 'success' and 'self.validated_config'
+ will contain the validated configuration. If it fails, 'self.status' will be 'failed',
+ 'self.msg' will describe the validation issues.
+ """
+
+ if not self.config:
+ self.status = "success"
+ self.msg = "Configuration is not available in the playbook for validation"
+ self.log(self.msg, "ERROR")
+ return self
+
temp_spec = dict(
- importImageDetails=dict(type='dict'),
- taggingDetails=dict(type='dict'),
- imageDistributionDetails=dict(type='dict'),
- imageActivationDetails=dict(type='dict'),
+ import_image_details=dict(type='dict'),
+ tagging_details=dict(type='dict'),
+ image_distribution_details=dict(type='dict'),
+ image_activation_details=dict(type='dict'),
)
- if self.config:
- msg = None
- # Validate site params
- valid_temp, invalid_params = validate_list_of_dicts(
- self.config, temp_spec
- )
- if invalid_params:
- msg = "Invalid parameters in playbook: {0}".format(
- "\n".join(invalid_params)
- )
- self.module.fail_json(msg=msg)
-
- self.validated = valid_temp
- if self.log:
- log(str(valid_temp))
- log(str(self.validated))
-
- def get_dnac_params(self, params):
- dnac_params = dict(
- dnac_host=params.get("dnac_host"),
- dnac_port=params.get("dnac_port"),
- dnac_username=params.get("dnac_username"),
- dnac_password=params.get("dnac_password"),
- dnac_verify=params.get("dnac_verify"),
- dnac_debug=params.get("dnac_debug"),
- dnac_log=params.get("dnac_log")
- )
- return dnac_params
+ self.config = self.camel_to_snake_case(self.config)
- def get_task_details(self, id):
- result = None
- response = self.dnac._exec(
- family="task",
- function='get_task_by_id',
- params={"task_id": id},
+ # Validate swim params
+ valid_temp, invalid_params = validate_list_of_dicts(
+ self.config, temp_spec
)
- if self.log:
- log(str(response))
- if isinstance(response, dict):
- result = response.get("response")
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format(invalid_params)
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_temp
+ self.msg = "Successfully validated playbook config params: {0}".format(str(valid_temp))
+ self.log(self.msg, "INFO")
+ self.status = "success"
+
+ return self
+
+ def site_exists(self, site_name):
+ """
+ Parameters:
+ site_name (str): The name of the site whose existence is to be verified in Cisco Catalyst Center.
+ Returns:
+ tuple: A tuple containing two values:
+ - site_exists (bool): A boolean indicating whether the site exists (True) or not (False).
+ - site_id (str or None): The ID of the site if it exists, or None if the site is not found.
+ Description:
+ This method checks the existence of a site in the Catalyst Center. If the site is found,it sets 'site_exists' to True,
+ retrieves the site's ID, and returns both values in a tuple. If the site does not exist, 'site_exists' is set
+ to False, and 'site_id' is None. If an exception occurs during the site lookup, an exception is raised.
+ """
- return result
-
- def site_exists(self):
site_exists = False
site_id = None
response = None
@@ -345,15 +583,15 @@ class DnacSwims:
response = self.dnac._exec(
family="sites",
function='get_site',
- params={"name": self.want.get("site_name")},
+ params={"name": site_name},
)
except Exception as e:
- self.module.fail_json(msg="Site not found")
+ self.msg = "An exception occurred: Site '{0}' does not exist in the Cisco Catalyst Center".format(site_name)
+ self.log(self.msg, "ERROR")
+ self.module.fail_json(msg=self.msg)
if response:
- if self.log:
- log(str(response))
-
+ self.log("Received API response from 'get_site': {0}".format(str(response)), "DEBUG")
site = response.get("response")
site_id = site[0].get("id")
site_exists = True
@@ -361,247 +599,629 @@ class DnacSwims:
return (site_exists, site_id)
def get_image_id(self, name):
- # check if given image exists, if exists store image_id
+ """
+ Retrieve the unique image ID based on the provided image name.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ name (str): The name of the software image to search for.
+ Returns:
+ str: The unique image ID (UUID) corresponding to the given image name.
+ Raises:
+ AnsibleFailJson: If the image is not found in the response.
+ Description:
+ This function sends a request to Cisco Catalyst Center to retrieve details about a software image based on its name.
+ It extracts and returns the image ID if a single matching image is found. If no image or multiple
+ images are found with the same name, it raises an exception.
+ """
+
image_response = self.dnac._exec(
family="software_image_management_swim",
function='get_software_image_details',
params={"image_name": name},
)
-
- if self.log:
- log(str(image_response))
-
+ self.log("Received API response from 'get_software_image_details': {0}".format(str(image_response)), "DEBUG")
image_list = image_response.get("response")
+
if (len(image_list) == 1):
image_id = image_list[0].get("imageUuid")
- if self.log:
- log("Image Id: " + str(image_id))
+ self.log("SWIM image '{0}' has the ID: {1}".format(name, image_id), "INFO")
else:
- self.module.fail_json(msg="Image not found", response=image_response)
+ error_message = "SWIM image '{0}' could not be found".format(name)
+ self.log(error_message, "ERROR")
+ self.module.fail_json(msg=error_message, response=image_response)
return image_id
+ def get_image_name_from_id(self, image_id):
+ """
+ Retrieve the unique image name based on the provided image id.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ image_id (str): The unique image ID (UUID) of the software image to search for.
+ Returns:
+ str: The image name corresponding to the given unique image ID (UUID)
+ Raises:
+ AnsibleFailJson: If the image is not found in the response.
+ Description:
+ This function sends a request to Cisco Catalyst Center to retrieve details about a software image based on its ID.
+ It extracts and returns the image name if a single matching image is found. If no image or multiple
+ images are found with the same ID, it raises an exception.
+ """
+
+ image_response = self.dnac._exec(
+ family="software_image_management_swim",
+ function='get_software_image_details',
+ params={"image_uuid": image_id},
+ )
+ self.log("Received API response from 'get_software_image_details': {0}".format(str(image_response)), "DEBUG")
+ image_list = image_response.get("response")
+
+ if (len(image_list) == 1):
+ image_name = image_list[0].get("name")
+ self.log("SWIM image '{0}' has been fetched successfully from Cisco Catalyst Center".format(image_name), "INFO")
+ else:
+ error_message = "SWIM image with Id '{0}' could not be found in Cisco Catalyst Center".format(image_id)
+ self.log(error_message, "ERROR")
+ self.module.fail_json(msg=error_message, response=image_response)
+
+ return image_name
+
+ def is_image_exist(self, name):
+ """
+ Check whether a software image with the given name exists in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ name (str): The name of the software image to search for.
+ Returns:
+ bool: True if exactly one image with the given name is found, False otherwise.
+ Raises:
+ None: Unlike 'get_image_id', this function does not fail when the image is absent.
+ Description:
+ This function sends a request to Cisco Catalyst Center to retrieve details about a software image based on its name.
+ It returns True when the response contains a single matching image; otherwise it returns
+ False, allowing callers to test for image existence without triggering a failure.
+ """
+
+ image_exist = False
+ image_response = self.dnac._exec(
+ family="software_image_management_swim",
+ function='get_software_image_details',
+ params={"image_name": name},
+ )
+ self.log("Received API response from 'get_software_image_details': {0}".format(str(image_response)), "DEBUG")
+ image_list = image_response.get("response")
+
+ if (len(image_list) == 1):
+ image_exist = True
+
+ return image_exist
+
def get_device_id(self, params):
+ """
+ Retrieve the unique device ID based on the provided parameters.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ params (dict): A dictionary containing parameters to filter devices.
+ Returns:
+ str: The unique device ID corresponding to the filtered device.
+ Description:
+ This function sends a request to Cisco Catalyst Center to retrieve a list of devices based on the provided
+ filtering parameters. If a single matching device is found, it extracts and returns the device ID. If
+ no device or multiple devices match the criteria, it raises an exception.
+ """
+ device_id = None
response = self.dnac._exec(
family="devices",
function='get_device_list',
params=params,
)
- if self.log:
- log(str(response))
+ self.log("Received API response from 'get_device_list': {0}".format(str(response)), "DEBUG")
device_list = response.get("response")
if (len(device_list) == 1):
device_id = device_list[0].get("id")
- if self.log:
- log("Device Id: " + str(device_id))
+ self.log("Device Id: {0}".format(str(device_id)), "INFO")
else:
- self.module.fail_json(msg="Device not found", response=response)
+ self.msg = "Device with params: '{0}' not found in Cisco Catalyst Center so can't fetch the device id".format(str(params))
+ self.log(self.msg, "WARNING")
return device_id
+ def get_device_uuids(self, site_name, device_family, device_role, device_series_name=None):
+ """
+ Retrieve a list of device UUIDs based on the specified criteria.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ site_name (str): The name of the site for which device UUIDs are requested.
+ device_family (str): The family/type of devices to filter on.
+ device_role (str): The role of devices to filter on. If None, 'ALL' roles are considered.
+ device_series_name(str): Specifies the name of the device series.
+ Returns:
+ list: A list of device UUIDs that match the specified criteria.
+ Description:
+ The function checks the reachability status and role of devices in the given site.
+ Only devices with "Reachable" status are considered, and filtering is based on the specified
+ device family and role (if provided).
+ """
+
+ device_uuid_list = []
+ if not site_name:
+ site_name = "Global"
+ self.log("Since site name is not given so it will be fetch all the devices under Global and mark site name as 'Global'", "INFO")
+
+ (site_exists, site_id) = self.site_exists(site_name)
+ if not site_exists:
+ self.log("""Site '{0}' is not found in the Cisco Catalyst Center, hence unable to fetch associated
+ devices.""".format(site_name), "INFO")
+ return device_uuid_list
+
+ if device_series_name:
+ if device_series_name.startswith(".*") and device_series_name.endswith(".*"):
+ self.log("Device series name '{0}' is already in the regex format".format(device_series_name), "INFO")
+ else:
+ device_series_name = ".*" + device_series_name + ".*"
+
+ site_params = {
+ "site_id": site_id,
+ "device_family": device_family
+ }
+ response = self.dnac._exec(
+ family="sites",
+ function='get_membership',
+ op_modifies=True,
+ params=site_params,
+ )
+ self.log("Received API response from 'get_membership': {0}".format(str(response)), "DEBUG")
+ response = response['device']
+
+ site_response_list = []
+ for item in response:
+ if item['response']:
+ for item_dict in item['response']:
+ site_response_list.append(item_dict)
+
+ if device_role.upper() == 'ALL':
+ device_role = None
+
+ device_params = {
+ 'series': device_series_name,
+ 'family': device_family,
+ 'role': device_role
+ }
+ device_list_response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ op_modifies=True,
+ params=device_params,
+ )
+
+ device_response = device_list_response.get('response')
+ if not response or not device_response:
+ self.log("Failed to retrieve devices associated with the site '{0}' due to empty API response.".format(site_name), "INFO")
+ return device_uuid_list
+
+ site_memberships_ids, device_response_ids = [], []
+
+ for item in site_response_list:
+ if item["reachabilityStatus"] != "Reachable":
+ self.log("""Device '{0}' is currently '{1}' and cannot be included in the SWIM distribution/activation
+ process.""".format(item["managementIpAddress"], item["reachabilityStatus"]), "INFO")
+ continue
+ self.log("""Device '{0}' from site '{1}' is ready for the SWIM distribution/activation
+ process.""".format(item["managementIpAddress"], site_name), "INFO")
+ site_memberships_ids.append(item["instanceUuid"])
+
+ for item in device_response:
+ if item["reachabilityStatus"] != "Reachable":
+ self.log("""Unable to proceed with the device '{0}' for SWIM distribution/activation as its status is
+ '{1}'.""".format(item["managementIpAddress"], item["reachabilityStatus"]), "INFO")
+ continue
+ self.log("""Device '{0}' matches to the specified filter requirements and is set for SWIM
+ distribution/activation.""".format(item["managementIpAddress"]), "INFO")
+ device_response_ids.append(item["instanceUuid"])
+
+ # Find the intersection of device IDs with the response get from get_membership api and get_device_list api with provided filters
+ device_uuid_list = set(site_memberships_ids).intersection(set(device_response_ids))
+
+ return device_uuid_list
+
def get_device_family_identifier(self, family_name):
+ """
+ Retrieve and store the device family identifier based on the provided family name.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ family_name (str): The name of the device family for which to retrieve the identifier.
+ Returns:
+ None
+ Raises:
+ AnsibleFailJson: If the family name is not found in the response.
+ Description:
+ This function sends a request to Cisco Catalyst Center to retrieve a list of device family identifiers.It then
+ searches for a specific family name within the response and stores its associated identifier. If the family
+ name is found, the identifier is stored; otherwise, an exception is raised.
+ """
+
have = {}
response = self.dnac._exec(
family="software_image_management_swim",
function='get_device_family_identifiers',
)
- if self.log:
- log(str(response))
+ self.log("Received API response from 'get_device_family_identifiers': {0}".format(str(response)), "DEBUG")
device_family_db = response.get("response")
+
if device_family_db:
device_family_details = get_dict_result(device_family_db, 'deviceFamily', family_name)
+
if device_family_details:
device_family_identifier = device_family_details.get("deviceFamilyIdentifier")
have["device_family_identifier"] = device_family_identifier
- if self.log:
- log("Family device indentifier:" + str(device_family_identifier))
+ self.log("Family device indentifier: {0}".format(str(device_family_identifier)), "INFO")
else:
- self.module.fail_json(msg="Family Device Name not found", response=[])
+ self.msg = "Device Family: {0} not found".format(str(family_name))
+ self.log(self.msg, "ERROR")
+ self.module.fail_json(msg=self.msg, response=[self.msg])
self.have.update(have)
def get_have(self):
+ """
+ Retrieve and store various software image and device details based on user-provided information.
+ Returns:
+ self: The current instance of the class with updated 'have' attributes.
+ Raises:
+ AnsibleFailJson: If required image or device details are not provided.
+ Description:
+ This function populates the 'have' dictionary with details related to software images, site information,
+ device families, distribution devices, and activation devices based on user-provided data in the 'want' dictionary.
+ It validates and retrieves the necessary information from Cisco Catalyst Center to support later actions.
+ """
+
if self.want.get("tagging_details"):
have = {}
tagging_details = self.want.get("tagging_details")
- if tagging_details.get("imageName"):
- image_id = self.get_image_id(tagging_details.get("imageName"))
+ if tagging_details.get("image_name"):
+ name = tagging_details.get("image_name").split("/")[-1]
+ image_id = self.get_image_id(name)
have["tagging_image_id"] = image_id
elif self.have.get("imported_image_id"):
have["tagging_image_id"] = self.have.get("imported_image_id")
else:
+ self.log("Image details for tagging not provided", "CRITICAL")
self.module.fail_json(msg="Image details for tagging not provided", response=[])
# check if given site exists, store siteid
# if not then use global site
- site_name = tagging_details.get("siteName")
+ site_name = tagging_details.get("site_name")
if site_name:
site_exists = False
- (site_exists, site_id) = self.site_exists()
+ (site_exists, site_id) = self.site_exists(site_name)
if site_exists:
have["site_id"] = site_id
- if self.log:
- log("Site Exists: " + str(site_exists) + "\n Site_id:" + str(site_id))
+ self.log("Site {0} exists having the site id: {1}".format(site_name, str(site_id)), "DEBUG")
else:
# For global site, use -1 as siteId
have["site_id"] = "-1"
- if self.log:
- log("Site Name not given by user. Using global site.")
+ self.log("Site Name not given by user. Using global site.", "WARNING")
self.have.update(have)
# check if given device family name exists, store indentifier value
- family_name = tagging_details.get("deviceFamilyName")
+ family_name = tagging_details.get("device_image_family_name")
self.get_device_family_identifier(family_name)
if self.want.get("distribution_details"):
have = {}
distribution_details = self.want.get("distribution_details")
+ site_name = distribution_details.get("site_name")
+ if site_name:
+ site_exists = False
+ (site_exists, site_id) = self.site_exists(site_name)
+
+ if site_exists:
+ have["site_id"] = site_id
+ self.log("Site '{0}' exists and has the site ID: {1}".format(site_name, str(site_id)), "DEBUG")
+
# check if image for distributon is available
- if distribution_details.get("imageName"):
- image_id = self.get_image_id(distribution_details.get("imageName"))
+ if distribution_details.get("image_name"):
+ name = distribution_details.get("image_name").split("/")[-1]
+ image_id = self.get_image_id(name)
have["distribution_image_id"] = image_id
elif self.have.get("imported_image_id"):
have["distribution_image_id"] = self.have.get("imported_image_id")
else:
- self.module.fail_json(msg="Image details for distribution not provided", response=[])
+ self.log("Image details required for distribution have not been provided", "ERROR")
+ self.module.fail_json(msg="Image details required for distribution have not been provided", response=[])
device_params = dict(
- hostname=distribution_details.get("deviceHostname"),
- serial_number=distribution_details.get("deviceSerialNumber"),
- management_ip_address=distribution_details.get("deviceIpAddress"),
- mac_address=distribution_details.get("deviceMacAddress"),
+ hostname=distribution_details.get("device_hostname"),
+ serialNumber=distribution_details.get("device_serial_number"),
+ managementIpAddress=distribution_details.get("device_ip_address"),
+ macAddress=distribution_details.get("device_mac_address"),
)
device_id = self.get_device_id(device_params)
- have["distribution_device_id"] = device_id
+
+ if device_id is not None:
+ have["distribution_device_id"] = device_id
+
self.have.update(have)
if self.want.get("activation_details"):
have = {}
activation_details = self.want.get("activation_details")
# check if image for activation is available
- if activation_details.get("imageName"):
- image_id = self.get_image_id(activation_details.get("imageName"))
+ if activation_details.get("image_name"):
+ name = activation_details.get("image_name").split("/")[-1]
+ image_id = self.get_image_id(name)
have["activation_image_id"] = image_id
elif self.have.get("imported_image_id"):
have["activation_image_id"] = self.have.get("imported_image_id")
-
else:
- self.module.fail_json(msg="Image details for activation not provided", response=[])
+ self.log("Image details required for activation have not been provided", "ERROR")
+ self.module.fail_json(msg="Image details required for activation have not been provided", response=[])
+
+ site_name = activation_details.get("site_name")
+ if site_name:
+ site_exists = False
+ (site_exists, site_id) = self.site_exists(site_name)
+ if site_exists:
+ have["site_id"] = site_id
+ self.log("The site '{0}' exists and has the site ID '{1}'".format(site_name, str(site_id)), "INFO")
device_params = dict(
- hostname=activation_details.get("deviceHostname"),
- serial_number=activation_details.get("deviceSerialNumber"),
- management_ip_address=activation_details.get("deviceIpAddress"),
- mac_address=activation_details.get("deviceMacAddress"),
+ hostname=activation_details.get("device_hostname"),
+ serialNumber=activation_details.get("device_serial_number"),
+ managementIpAddress=activation_details.get("device_ip_address"),
+ macAddress=activation_details.get("device_mac_address"),
)
device_id = self.get_device_id(device_params)
- have["activation_device_id"] = device_id
+
+ if device_id is not None:
+ have["activation_device_id"] = device_id
self.have.update(have)
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+
+ return self
+
+ def get_want(self, config):
+ """
+ Retrieve and store import, tagging, distribution, and activation details from playbook configuration.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): The configuration dictionary containing image import and other details.
+ Returns:
+ self: The current instance of the class with updated 'want' attributes.
+ Raises:
+ AnsibleFailJson: If an incorrect import type is specified.
+ Description:
+ This function parses the playbook configuration to extract information related to image
+ import, tagging, distribution, and activation. It stores these details in the 'want' dictionary
+ for later use in the Ansible module.
+ """
- def get_want(self):
want = {}
- for image in self.validated:
- if image.get("importImageDetails"):
- want["import_image"] = True
- want["import_type"] = image.get("importImageDetails").get("type").lower()
- if want["import_type"] == "url":
- want["url_import_details"] = image.get("importImageDetails").get("urlDetails")
- elif want["import_type"] == "local":
- want["local_import_details"] = image.get("importImageDetails").get("localImageDetails")
- else:
- self.module.fail_json(msg="Incorrect import type. Supported Values: local or url")
+ if config.get("import_image_details"):
+ want["import_image"] = True
+ want["import_type"] = config.get("import_image_details").get("type").lower()
+ if want["import_type"] == "remote":
+ want["url_import_details"] = config.get("import_image_details").get("url_details")
+ elif want["import_type"] == "local":
+ want["local_import_details"] = config.get("import_image_details").get("local_image_details")
+ else:
+ self.log("The import type '{0}' provided is incorrect. Only 'local' or 'remote' are supported.".format(want["import_type"]), "CRITICAL")
+ self.module.fail_json(msg="Incorrect import type. Supported Values: local or remote")
- want["tagging_details"] = image.get("taggingDetails")
- want["distribution_details"] = image.get("imageDistributionDetails")
- want["activation_details"] = image.get("imageActivationDetails")
+ want["tagging_details"] = config.get("tagging_details")
+ want["distribution_details"] = config.get("image_distribution_details")
+ want["activation_details"] = config.get("image_activation_details")
self.want = want
- if self.log:
- log(str(self.want))
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ return self
def get_diff_import(self):
- if not self.want.get("import_image"):
- return
-
- if self.want.get("import_type") == "url":
- image_name = self.want.get("url_import_details").get("payload")[0].get("sourceURL")
- url_import_params = dict(
- payload=self.want.get("url_import_details").get("payload"),
- schedule_at=self.want.get("url_import_details").get("scheduleAt"),
- schedule_desc=self.want.get("url_import_details").get("scheduleDesc"),
- schedule_origin=self.want.get("url_import_details").get("scheduleOrigin"),
- )
- response = self.dnac._exec(
- family="software_image_management_swim",
- function='import_software_image_via_url',
- op_modifies=True,
- params=url_import_params,
- )
- else:
- image_name = self.want.get("local_import_details").get("filePath")
- local_import_params = dict(
- is_third_party=self.want.get("local_import_details").get("isThirdParty"),
- third_party_vendor=self.want.get("local_import_details").get("thirdPartyVendor"),
- third_party_image_family=self.want.get("local_import_details").get("thirdPartyImageFamily"),
- third_party_application_type=self.want.get("local_import_details").get("thirdPartyApplicationType"),
- file_path=self.want.get("local_import_details").get("filePath"),
- )
+ """
+ Check the image import type and fetch the image ID for the imported image for further use.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This function checks the type of image import (URL or local) and proceeds with the import operation accordingly.
+ It then monitors the import task's progress and updates the 'result' dictionary. If the operation is successful,
+ 'changed' is set to True.
+ Additionally, if tagging, distribution, or activation details are provided, it fetches the image ID for the
+ imported image and stores it in the 'have' dictionary for later use.
+ """
+
+ try:
+ import_type = self.want.get("import_type")
+
+ if not import_type:
+ self.status = "success"
+ self.msg = "Error: Details required for importing SWIM image. Please provide the necessary information."
+ self.result['msg'] = self.msg
+ self.log(self.msg, "WARNING")
+ self.result['changed'] = False
+ return self
+
+ if import_type == "remote":
+ image_name = self.want.get("url_import_details").get("payload")[0].get("source_url")
+ else:
+ image_name = self.want.get("local_import_details").get("file_path")
+
+ # Code to check if the image already exists in Catalyst Center
+ name = image_name.split('/')[-1]
+ image_exist = self.is_image_exist(name)
+
+ import_key_mapping = {
+ 'source_url': 'sourceURL',
+ 'image_family': 'imageFamily',
+ 'application_type': 'applicationType',
+ 'is_third_party': 'thirdParty',
+ }
+
+ if image_exist:
+ image_id = self.get_image_id(name)
+ self.have["imported_image_id"] = image_id
+ self.msg = "Image '{0}' already exists in the Cisco Catalyst Center".format(name)
+ self.result['msg'] = self.msg
+ self.log(self.msg, "INFO")
+ self.status = "success"
+ self.result['changed'] = False
+ return self
+
+ if self.want.get("import_type") == "remote":
+ import_payload_dict = {}
+ temp_payload = self.want.get("url_import_details").get("payload")[0]
+ keys_to_change = list(import_key_mapping.keys())
+
+ for key, val in temp_payload.items():
+ if key in keys_to_change:
+ api_key_name = import_key_mapping[key]
+ import_payload_dict[api_key_name] = val
+
+ import_image_payload = [import_payload_dict]
+ import_params = dict(
+ payload=import_image_payload,
+ scheduleAt=self.want.get("url_import_details").get("schedule_at"),
+ scheduleDesc=self.want.get("url_import_details").get("schedule_desc"),
+ scheduleOrigin=self.want.get("url_import_details").get("schedule_origin"),
+ )
+ import_function = 'import_software_image_via_url'
+ else:
+ file_path = self.want.get("local_import_details").get("file_path")
+ import_params = dict(
+ is_third_party=self.want.get("local_import_details").get("is_third_party"),
+ third_party_vendor=self.want.get("local_import_details").get("third_party_vendor"),
+ third_party_image_family=self.want.get("local_import_details").get("third_party_image_family"),
+ third_party_application_type=self.want.get("local_import_details").get("third_party_application_type"),
+ multipart_fields={'file': (os.path.basename(file_path), open(file_path, 'rb'), 'application/octet-stream')},
+ multipart_monitor_callback=None
+ )
+ import_function = 'import_local_software_image'
+
response = self.dnac._exec(
family="software_image_management_swim",
- function='import_local_software_image',
+ function=import_function,
op_modifies=True,
- params=local_import_params,
- file_paths=[('file_path', 'file')],
+ params=import_params,
)
+ self.log("Received API response from {0}: {1}".format(import_function, str(response)), "DEBUG")
- if self.log:
- log(str(response))
+ task_details = {}
+ task_id = response.get("response").get("taskId")
- task_details = {}
- task_id = response.get("response").get("taskId")
- while (True):
- task_details = self.get_task_details(task_id)
- if task_details and \
- ("completed successfully" in task_details.get("progress").lower()):
- self.result['changed'] = True
- self.result['msg'] = "Image imported successfully"
- break
+ while (True):
+ task_details = self.get_task_details(task_id)
+ name = image_name.split('/')[-1]
- if task_details and task_details.get("isError"):
- if "Image already exists" in task_details.get("failureReason"):
- self.result['msg'] = "Image already exists."
+ if task_details and \
+ ("completed successfully" in task_details.get("progress").lower()):
+ self.result['changed'] = True
+ self.status = "success"
+ self.msg = "Swim Image {0} imported successfully".format(name)
+ self.result['msg'] = self.msg
+ self.log(self.msg, "INFO")
break
- else:
- self.module.fail_json(msg=task_details.get("failureReason"),
- response=task_details)
-
- self.result['response'] = task_details if task_details else response
- if not (self.want.get("tagging_details") or self.want.get("distribution_details")
- or self.want.get("activation_details")):
- return
- # Fetch image_id for the imported image for further use
- image_name = image_name.split('/')[-1]
- image_id = self.get_image_id(image_name)
- self.have["imported_image_id"] = image_id
+
+ if task_details and task_details.get("isError"):
+ if "already exists" in task_details.get("failureReason", ""):
+ self.msg = "SWIM Image {0} already exists in the Cisco Catalyst Center".format(name)
+ self.result['msg'] = self.msg
+ self.log(self.msg, "INFO")
+ self.status = "success"
+ self.result['changed'] = False
+ break
+ else:
+ self.status = "failed"
+ self.msg = task_details.get("failureReason", "SWIM Image {0} seems to be invalid".format(image_name))
+ self.log(self.msg, "WARNING")
+ self.result['response'] = self.msg
+ return self
+
+ self.result['response'] = task_details if task_details else response
+
+ # Fetch image_id for the imported image for further use
+ image_name = image_name.split('/')[-1]
+ image_id = self.get_image_id(image_name)
+ self.have["imported_image_id"] = image_id
+
+ return self
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = """Error: Import image details are not provided in the playbook, or the Import Image API was not
+ triggered successfully. Please ensure the necessary details are provided and verify the status of the Import Image process."""
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+
+ return self
def get_diff_tagging(self):
+ """
+ Tag or untag a software image as golden based on provided tagging details.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This function tags or untags a software image as a golden image in Cisco Catalyst Center based on the provided
+ tagging details. The tagging action is determined by the value of the 'tagging' attribute
+ in the 'tagging_details' dictionary.If 'tagging' is True, the image is tagged as golden, and if 'tagging'
+ is False, the golden tag is removed. The function sends the appropriate request to Cisco Catalyst Center and updates the
+ task details in the 'result' dictionary. If the operation is successful, 'changed' is set to True.
+ """
+
tagging_details = self.want.get("tagging_details")
tag_image_golden = tagging_details.get("tagging")
+ image_name = self.get_image_name_from_id(self.have.get("tagging_image_id"))
+
+ image_params = dict(
+ image_id=self.have.get("tagging_image_id"),
+ site_id=self.have.get("site_id"),
+ device_family_identifier=self.have.get("device_family_identifier"),
+ device_role=tagging_details.get("device_role", "ALL").upper()
+ )
+
+ response = self.dnac._exec(
+ family="software_image_management_swim",
+ function='get_golden_tag_status_of_an_image',
+ op_modifies=True,
+ params=image_params
+ )
+ self.log("Received API response from 'get_golden_tag_status_of_an_image': {0}".format(str(response)), "DEBUG")
+
+ response = response.get('response')
+ if response:
+ image_status = response['taggedGolden']
+ if image_status and image_status == tag_image_golden:
+ self.status = "success"
+ self.result['changed'] = False
+ self.msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center".format(image_name)
+ self.result['msg'] = self.msg
+ self.log(self.msg, "INFO")
+ return self
+
+ if not image_status and image_status == tag_image_golden:
+ self.status = "success"
+ self.result['changed'] = False
+ self.msg = "SWIM Image '{0}' already un-tagged from Golden image in Cisco Catalyst Center".format(image_name)
+ self.result['msg'] = self.msg
+ self.log(self.msg, "INFO")
+ return self
if tag_image_golden:
image_params = dict(
imageId=self.have.get("tagging_image_id"),
siteId=self.have.get("site_id"),
deviceFamilyIdentifier=self.have.get("device_family_identifier"),
- deviceRole=tagging_details.get("deviceRole")
+ deviceRole=tagging_details.get("device_role", "ALL").upper()
)
- if self.log:
- log("Image params for tagging image as golden:" + str(image_params))
+ self.log("Parameters for tagging the image as golden: {0}".format(str(image_params)), "INFO")
response = self.dnac._exec(
family="software_image_management_swim",
@@ -609,16 +1229,10 @@ class DnacSwims:
op_modifies=True,
params=image_params
)
+ self.log("Received API response from 'tag_as_golden_image': {0}".format(str(response)), "DEBUG")
else:
- image_params = dict(
- image_id=self.have.get("tagging_image_id"),
- site_id=self.have.get("site_id"),
- device_family_identifier=self.have.get("device_family_identifier"),
- device_role=tagging_details.get("deviceRole")
- )
- if self.log:
- log("Image params for un-tagging image as golden:" + str(image_params))
+ self.log("Parameters for un-tagging the image as golden: {0}".format(str(image_params)), "INFO")
response = self.dnac._exec(
family="software_image_management_swim",
@@ -626,126 +1240,669 @@ class DnacSwims:
op_modifies=True,
params=image_params
)
+ self.log("Received API response from 'remove_golden_tag_for_image': {0}".format(str(response)), "DEBUG")
- if response:
- task_details = {}
- task_id = response.get("response").get("taskId")
+ if not response:
+ self.status = "failed"
+ self.msg = "Did not get the response of API so cannot check the Golden tagging status of image - {0}".format(image_name)
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ return self
+
+ task_details = {}
+ task_id = response.get("response").get("taskId")
+
+ while True:
task_details = self.get_task_details(task_id)
- if not task_details.get("isError"):
+
+ if not task_details.get("isError") and 'successful' in task_details.get("progress"):
+ self.status = "success"
self.result['changed'] = True
- self.result['msg'] = task_details.get("progress")
+ self.msg = task_details.get("progress")
+ self.result['msg'] = self.msg
+ self.result['response'] = self.msg
+ self.log(self.msg, "INFO")
+ break
+ elif task_details.get("isError"):
+ failure_reason = task_details.get("failureReason", "")
+ if failure_reason and "An inheritted tag cannot be un-tagged" in failure_reason:
+ self.status = "failed"
+ self.result['changed'] = False
+ self.msg = failure_reason
+ self.result['msg'] = failure_reason
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ break
+ else:
+ error_message = task_details.get("failureReason", "Error: while tagging/un-tagging the golden swim image.")
+ self.status = "failed"
+ self.msg = error_message
+ self.result['msg'] = error_message
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ break
- self.result['response'] = task_details if task_details else response
+ return self
+
+ def get_device_ip_from_id(self, device_id):
+ """
+ Retrieve the management IP address of a device from Cisco Catalyst Center using its ID.
+ Parameters:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - device_id (str): The unique identifier of the device in Cisco Catalyst Center.
+ Returns:
+ str: The management IP address of the specified device.
+ Raises:
+ Exception: If there is an error while retrieving the response from Cisco Catalyst Center.
+ Description:
+ This method queries Cisco Catalyst Center for the device details based on its unique identifier (ID).
+ It uses the 'get_device_list' function in the 'devices' family, extracts the management IP address
+ from the response, and returns it. If any error occurs during the process, an exception is raised
+ with an appropriate error message logged.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params={"id": device_id}
+ )
+ self.log("Received API response from 'get_device_list': {0}".format(str(response)), "DEBUG")
+ response = response.get('response')[0]
+ device_ip = response.get("managementIpAddress")
+
+ return device_ip
+ except Exception as e:
+ error_message = "Error occurred while getting the response of device from Cisco Catalyst Center: {0}".format(str(e))
+ self.log(error_message, "ERROR")
+ raise Exception(error_message)
+
+ def check_swim_task_status(self, swim_task_dict, swim_task_name):
+ """
+ Check the status of the SWIM (Software Image Management) task for each device.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ swim_task_dict (dict): A dictionary containing the mapping of device IP address to the respective task ID.
+ swim_task_name (str): The name of the SWIM task being checked which is either Distribution or Activation.
+ Returns:
+ tuple: A tuple containing two elements:
+ - device_ips_list (list): A list of device IP addresses for which the SWIM task failed.
+ - device_count (int): The count of devices for which the SWIM task was successful.
+ Description:
+ This function iterates through the distribution_task_dict, which contains the mapping of
+ device IP address to their respective task ID. It checks the status of the SWIM task for each device by
+ repeatedly querying for task details until the task is either completed successfully or fails. If the task
+ is successful, the device count is incremented. If the task fails, an error message is logged, and the device
+ IP is appended to the device_ips_list and return a tuple containing the device_ips_list and device_count.
+ """
+
+ device_ips_list = []
+ device_count = 0
+
+ for device_ip, task_id in swim_task_dict.items():
+ start_time = time.time()
+ max_timeout = self.params.get('dnac_api_task_timeout')
+
+ while (True):
+ end_time = time.time()
+ if (end_time - start_time) >= max_timeout:
+ self.log("""Max timeout of {0} sec has reached for the task id '{1}' for the device '{2}' and unexpected
+ task status so moving out to next task id""".format(max_timeout, task_id, device_ip), "WARNING")
+ device_ips_list.append(device_ip)
+ break
+
+ task_details = self.get_task_details(task_id)
+
+ if not task_details.get("isError") and \
+ ("completed successfully" in task_details.get("progress")):
+ self.result['changed'] = True
+ self.status = "success"
+ self.log("Image {0} successfully for the device '{1}".format(swim_task_name, device_ip), "INFO")
+ device_count += 1
+ break
+
+ if task_details.get("isError"):
+ error_msg = "Image {0} gets failed for the device '{1}'".format(swim_task_name, device_ip)
+ self.log(error_msg, "ERROR")
+ self.result['response'] = task_details
+ device_ips_list.append(device_ip)
+ break
+ time.sleep(self.params.get('dnac_task_poll_interval'))
+
+ return device_ips_list, device_count
def get_diff_distribution(self):
+ """
+ Get image distribution parameters from the playbook and trigger image distribution.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This function retrieves image distribution parameters from the playbook's 'distribution_details' and triggers
+ the distribution of the specified software image to the specified device. It monitors the distribution task's
+ progress and updates the 'result' dictionary. If the operation is successful, 'changed' is set to True.
+ """
+
distribution_details = self.want.get("distribution_details")
- distribution_params = dict(
- payload=[dict(
- deviceUuid=self.have.get("distribution_device_id"),
- imageUuid=self.have.get("distribution_image_id")
+ site_name = distribution_details.get("site_name")
+ device_family = distribution_details.get("device_family_name")
+ device_role = distribution_details.get("device_role", "ALL")
+ device_series_name = distribution_details.get("device_series_name")
+ device_uuid_list = self.get_device_uuids(site_name, device_family, device_role, device_series_name)
+ image_id = self.have.get("distribution_image_id")
+ self.complete_successful_distribution = False
+ self.partial_successful_distribution = False
+ self.single_device_distribution = False
+
+ if self.have.get("distribution_device_id"):
+
+ distribution_params = dict(
+ payload=[dict(
+ deviceUuid=self.have.get("distribution_device_id"),
+ imageUuid=image_id
+ )]
+ )
+ self.log("Distribution Params: {0}".format(str(distribution_params)), "INFO")
+
+ response = self.dnac._exec(
+ family="software_image_management_swim",
+ function='trigger_software_image_distribution',
+ op_modifies=True,
+ params=distribution_params,
+ )
+ self.log("Received API response from 'trigger_software_image_distribution': {0}".format(str(response)), "DEBUG")
+
+ if response:
+ task_details = {}
+ task_id = response.get("response").get("taskId")
+
+ while (True):
+ task_details = self.get_task_details(task_id)
+
+ if not task_details.get("isError") and \
+ ("completed successfully" in task_details.get("progress")):
+ self.result['changed'] = True
+ self.status = "success"
+ self.single_device_distribution = True
+ self.result['msg'] = "Image with Id {0} Distributed Successfully".format(image_id)
+ break
+
+ if task_details.get("isError"):
+ self.status = "failed"
+ self.msg = "Image with Id {0} Distribution Failed".format(image_id)
+ self.log(self.msg, "ERROR")
+ self.result['response'] = task_details
+ break
+
+ self.result['response'] = task_details if task_details else response
+
+ return self
+
+ if len(device_uuid_list) == 0:
+ self.status = "success"
+ self.msg = "The SWIM image distribution task could not proceed because no eligible devices were found."
+ self.result['msg'] = self.msg
+ self.log(self.msg, "WARNING")
+ return self
+
+ self.log("Device UUIDs involved in Image Distribution: {0}".format(str(device_uuid_list)), "INFO")
+ distribution_task_dict = {}
+
+ for device_uuid in device_uuid_list:
+ device_management_ip = self.get_device_ip_from_id(device_uuid)
+ distribution_params = dict(
+ payload=[dict(
+ deviceUuid=device_uuid,
+ imageUuid=image_id
+ )]
+ )
+ self.log("Distribution Params: {0}".format(str(distribution_params)), "INFO")
+ response = self.dnac._exec(
+ family="software_image_management_swim",
+ function='trigger_software_image_distribution',
+ op_modifies=True,
+ params=distribution_params,
+ )
+ self.log("Received API response from 'trigger_software_image_distribution': {0}".format(str(response)), "DEBUG")
+
+ if response:
+ task_details = {}
+ task_id = response.get("response").get("taskId")
+ distribution_task_dict[device_management_ip] = task_id
+
+ device_ips_list, device_distribution_count = self.check_swim_task_status(distribution_task_dict, 'Distribution')
+
+ if device_distribution_count == 0:
+ self.status = "failed"
+ self.msg = "Image with Id {0} Distribution Failed for all devices".format(image_id)
+ elif device_distribution_count == len(device_uuid_list):
+ self.result['changed'] = True
+ self.status = "success"
+ self.complete_successful_distribution = True
+ self.msg = "Image with Id {0} Distributed Successfully for all devices".format(image_id)
+ else:
+ self.result['changed'] = True
+ self.status = "success"
+ self.partial_successful_distribution = False
+ self.msg = "Image with Id '{0}' Distributed and partially successfull".format(image_id)
+ self.log("For device(s) {0} image Distribution gets failed".format(str(device_ips_list)), "CRITICAL")
+
+ self.result['msg'] = self.msg
+ self.log(self.msg, "INFO")
+
+ return self
+
+ def get_diff_activation(self):
+ """
+ Get image activation parameters from the playbook and trigger image activation.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This function retrieves image activation parameters from the playbook's 'activation_details' and triggers the
+ activation of the specified software image on the specified device. It monitors the activation task's progress and
+ updates the 'result' dictionary. If the operation is successful, 'changed' is set to True.
+ """
+
+ activation_details = self.want.get("activation_details")
+ site_name = activation_details.get("site_name")
+ device_family = activation_details.get("device_family_name")
+ device_role = activation_details.get("device_role", "ALL")
+ device_series_name = activation_details.get("device_series_name")
+ device_uuid_list = self.get_device_uuids(site_name, device_family, device_role, device_series_name)
+ image_id = self.have.get("activation_image_id")
+ self.complete_successful_activation = False
+ self.partial_successful_activation = False
+ self.single_device_activation = False
+
+ if self.have.get("activation_device_id"):
+ payload = [dict(
+ activateLowerImageVersion=activation_details.get("activate_lower_image_version"),
+ deviceUpgradeMode=activation_details.get("device_upgrade_mode"),
+ distributeIfNeeded=activation_details.get("distribute_if_needed"),
+ deviceUuid=self.have.get("activation_device_id"),
+ imageUuidList=[image_id]
)]
- )
- if self.log:
- log("Distribution Params: " + str(distribution_params))
- response = self.dnac._exec(
- family="software_image_management_swim",
- function='trigger_software_image_distribution',
- op_modifies=True,
- params=distribution_params,
- )
- if response:
+ activation_params = dict(
+ schedule_validate=activation_details.get("scehdule_validate"),
+ payload=payload
+ )
+ self.log("Activation Params: {0}".format(str(activation_params)), "INFO")
+
+ response = self.dnac._exec(
+ family="software_image_management_swim",
+ function='trigger_software_image_activation',
+ op_modifies=True,
+ params=activation_params,
+ )
+ self.log("Received API response from 'trigger_software_image_activation': {0}".format(str(response)), "DEBUG")
+
task_details = {}
task_id = response.get("response").get("taskId")
+
while (True):
task_details = self.get_task_details(task_id)
+
if not task_details.get("isError") and \
("completed successfully" in task_details.get("progress")):
self.result['changed'] = True
- self.result['msg'] = "Image Distributed Successfully"
+ self.result['msg'] = "Image Activated successfully"
+ self.status = "success"
+ self.single_device_activation = True
break
if task_details.get("isError"):
- self.module.fail_json(msg="Image Distribution Failed",
- response=task_details)
+ self.msg = "Activation for Image with Id '{0}' gets failed".format(image_id)
+ self.status = "failed"
+ self.result['response'] = task_details
+ self.log(self.msg, "ERROR")
+ return self
self.result['response'] = task_details if task_details else response
- def get_diff_activation(self):
- activation_details = self.want.get("activation_details")
- payload = [dict(
- activateLowerImageVersion=activation_details.get("activateLowerImageVersion"),
- deviceUpgradeMode=activation_details.get("deviceUpgradeMode"),
- distributeIfNeeded=activation_details.get("distributeIfNeeded"),
- deviceUuid=self.have.get("activation_device_id"),
- imageUuidList=[self.have.get("activation_image_id")]
- )]
- activation_params = dict(
- schedule_validate=activation_details.get("scehduleValidate"),
- payload=payload
+ return self
+
+ if len(device_uuid_list) == 0:
+ self.status = "success"
+ self.msg = "The SWIM image activation task could not proceed because no eligible devices were found."
+ self.result['msg'] = self.msg
+ self.log(self.msg, "WARNING")
+ return self
+
+ self.log("Device UUIDs involved in Image Activation: {0}".format(str(device_uuid_list)), "INFO")
+ activation_task_dict = {}
+
+ for device_uuid in device_uuid_list:
+ device_management_ip = self.get_device_ip_from_id(device_uuid)
+ payload = [dict(
+ activateLowerImageVersion=activation_details.get("activate_lower_image_version"),
+ deviceUpgradeMode=activation_details.get("device_upgrade_mode"),
+ distributeIfNeeded=activation_details.get("distribute_if_needed"),
+ deviceUuid=device_uuid,
+ imageUuidList=[image_id]
+ )]
+
+ activation_params = dict(
+ schedule_validate=activation_details.get("scehdule_validate"),
+ payload=payload
+ )
+ self.log("Activation Params: {0}".format(str(activation_params)), "INFO")
+
+ response = self.dnac._exec(
+ family="software_image_management_swim",
+ function='trigger_software_image_activation',
+ op_modifies=True,
+ params=activation_params,
+ )
+ self.log("Received API response from 'trigger_software_image_activation': {0}".format(str(response)), "DEBUG")
+
+ if response:
+ task_details = {}
+ task_id = response.get("response").get("taskId")
+ activation_task_dict[device_management_ip] = task_id
+
+ device_ips_list, device_activation_count = self.check_swim_task_status(activation_task_dict, 'Activation')
+
+ if device_activation_count == 0:
+ self.status = "failed"
+ self.msg = "Image with Id '{0}' activation failed for all devices".format(image_id)
+ elif device_activation_count == len(device_uuid_list):
+ self.result['changed'] = True
+ self.status = "success"
+ self.complete_successful_activation = True
+ self.msg = "Image with Id '{0}' activated successfully for all devices".format(image_id)
+ else:
+ self.result['changed'] = True
+ self.status = "success"
+ self.partial_successful_activation = True
+ self.msg = "Image with Id '{0}' activated and partially successfull".format(image_id)
+ self.log("For Device(s) {0} Image activation gets Failed".format(str(device_ips_list)), "CRITICAL")
+
+ self.result['msg'] = self.msg
+ self.log(self.msg, "INFO")
+
+ return self
+
+ def get_diff_merged(self, config):
+ """
+ Get tagging details and then trigger distribution followed by activation if specified in the playbook.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): The configuration dictionary containing tagging, distribution, and activation details.
+ Returns:
+ self: The current instance of the class with updated 'result' and 'have' attributes.
+ Description:
+ This function checks the provided playbook configuration for tagging, distribution, and activation details. It
+ then triggers these operations in sequence if the corresponding details are found in the configuration.The
+ function monitors the progress of each task and updates the 'result' dictionary accordingly. If any of the
+ operations are successful, 'changed' is set to True.
+ """
+
+ if config.get("tagging_details"):
+ self.get_diff_tagging().check_return_status()
+
+ if config.get("image_distribution_details"):
+ self.get_diff_distribution().check_return_status()
+
+ if config.get("image_activation_details"):
+ self.get_diff_activation().check_return_status()
+
+ return self
+
+ def verify_diff_imported(self, import_type):
+ """
+ Verify the successful import of a software image into Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ import_type (str): The type of import, either 'remote' or 'local'.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method verifies the successful import of a software image into Cisco Catalyst Center.
+ It checks whether the image exists in Catalyst Center based on the provided import type.
+ If the image exists, the status is set to 'success', and a success message is logged.
+ If the image does not exist, a warning message is logged indicating a potential import failure.
+ """
+
+ if import_type == "remote":
+ image_name = self.want.get("url_import_details").get("payload")[0].get("source_url")
+ else:
+ image_name = self.want.get("local_import_details").get("file_path")
+
+ # Code to check if the image already exists in Catalyst Center
+ name = image_name.split('/')[-1]
+ image_exist = self.is_image_exist(name)
+ if image_exist:
+ self.status = "success"
+ self.msg = "The requested Image '{0}' imported in the Cisco Catalyst Center and Image presence has been verified.".format(name)
+ self.log(self.msg, "INFO")
+ else:
+ self.log("""The playbook input for SWIM Image '{0}' does not align with the Cisco Catalyst Center, indicating that image
+ may not have imported successfully.""".format(name), "INFO")
+
+ return self
+
+ def verify_diff_tagged(self):
+ """
+ Verify the Golden tagging status of a software image in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method verifies the tagging status of a software image in Cisco Catalyst Center.
+ It retrieves tagging details from the input, including the desired tagging status and image ID.
+ Using the provided image ID, it obtains image parameters required for checking the image status.
+ The method then queries Catalyst Center to get the golden tag status of the image.
+ If the image status matches the desired tagging status, a success message is logged.
+ If there is a mismatch between the playbook input and the Catalyst Center, a warning message is logged.
+ """
+
+ tagging_details = self.want.get("tagging_details")
+ tag_image_golden = tagging_details.get("tagging")
+ image_id = self.have.get("tagging_image_id")
+ image_name = self.get_image_name_from_id(image_id)
+
+ image_params = dict(
+ image_id=self.have.get("tagging_image_id"),
+ site_id=self.have.get("site_id"),
+ device_family_identifier=self.have.get("device_family_identifier"),
+ device_role=tagging_details.get("device_role", "ALL").upper()
)
- if self.log:
- log("Activation Params: " + str(activation_params))
+ self.log("Parameters for checking the status of image: {0}".format(str(image_params)), "INFO")
response = self.dnac._exec(
family="software_image_management_swim",
- function='trigger_software_image_activation',
+ function='get_golden_tag_status_of_an_image',
op_modifies=True,
- params=activation_params,
+ params=image_params
)
- task_details = {}
- task_id = response.get("response").get("taskId")
- while (True):
- task_details = self.get_task_details(task_id)
- if not task_details.get("isError") and \
- ("completed successfully" in task_details.get("progress")):
- self.result['changed'] = True
- self.result['msg'] = "Image activated successfully"
- break
-
- if task_details.get("isError"):
- self.module.fail_json(msg="Image Activation Failed",
- response=task_details)
+ self.log("Received API response from 'get_golden_tag_status_of_an_image': {0}".format(str(response)), "DEBUG")
- self.result['response'] = task_details if task_details else response
+ response = response.get('response')
+ if response:
+ image_status = response['taggedGolden']
+ if image_status == tag_image_golden:
+ if tag_image_golden:
+ self.msg = """The requested image '{0}' has been tagged as golden in the Cisco Catalyst Center and
+ its status has been successfully verified.""".format(image_name)
+ self.log(self.msg, "INFO")
+ else:
+ self.msg = """The requested image '{0}' has been un-tagged as golden in the Cisco Catalyst Center and
+ image status has been verified.""".format(image_name)
+ self.log(self.msg, "INFO")
+ else:
+ self.log("""Mismatch between the playbook input for tagging/un-tagging image as golden and the Cisco Catalyst Center indicates that
+ the tagging/un-tagging task was not executed successfully.""", "INFO")
+
+ return self
+
+ def verify_diff_distributed(self):
+ """
+ Verify the distribution status of a software image in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ import_type (str): The type of import, either 'url' or 'local'.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method verifies the distribution status of a software image in Cisco Catalyst Center.
+ It retrieves the image ID and name from the input and if distribution device ID is provided, it checks the distribution status for that
+ list of specific device and logs the info message based on distribution status.
+ """
+
+ image_id = self.have.get("distribution_image_id")
+ image_name = self.get_image_name_from_id(image_id)
+
+ if self.have.get("distribution_device_id"):
+ if self.single_device_distribution:
+ self.msg = """The requested image '{0}', associated with the device ID '{1}', has been successfully distributed in the Cisco Catalyst Center
+ and its status has been verified.""".format(image_name, self.have.get("distribution_device_id"))
+ self.log(self.msg, "INFO")
+ else:
+ self.log("""Mismatch between the playbook input for distributing the image to the device with ID '{0}' and the actual state in the
+ Cisco Catalyst Center suggests that the distribution task might not have been executed
+ successfully.""".format(self.have.get("distribution_device_id")), "INFO")
+ elif self.complete_successful_distribution:
+ self.msg = """The requested image '{0}', with ID '{1}', has been successfully distributed to all devices within the specified
+ site in the Cisco Catalyst Center.""".format(image_name, image_id)
+ self.log(self.msg, "INFO")
+ elif self.partial_successful_distribution:
+ self.msg = """T"The requested image '{0}', with ID '{1}', has been partially distributed across some devices in the Cisco Catalyst
+ Center.""".format(image_name, image_id)
+ self.log(self.msg, "INFO")
+ else:
+ self.msg = """The requested image '{0}', with ID '{1}', failed to be distributed across devices in the Cisco Catalyst
+ Center.""".format(image_name, image_id)
+ self.log(self.msg, "INFO")
+
+ return self
+
+ def verify_diff_activated(self):
+ """
+ Verify the activation status of a software image in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method verifies the activation status of a software image in Cisco Catalyst Center and retrieves the image ID and name from
+ the input. If activation device ID is provided, it checks the activation status for that specific device. Based on activation status
+ a corresponding message is logged.
+ """
+
+ image_id = self.have.get("activation_image_id")
+ image_name = self.get_image_name_from_id(image_id)
+
+ if self.have.get("activation_device_id"):
+ if self.single_device_activation:
+ self.msg = """The requested image '{0}', associated with the device ID '{1}', has been successfully activated in the Cisco Catalyst
+ Center and its status has been verified.""".format(image_name, self.have.get("activation_device_id"))
+ self.log(self.msg, "INFO")
+ else:
+ self.log("""Mismatch between the playbook's input for activating the image '{0}' on the device with ID '{1}' and the actual state in
+ the Cisco Catalyst Center suggests that the activation task might not have been executed
+ successfully.""".format(image_name, self.have.get("activation_device_id")), "INFO")
+ elif self.complete_successful_activation:
+ self.msg = """The requested image '{0}', with ID '{1}', has been successfully activated on all devices within the specified site in the
+ Cisco Catalyst Center.""".format(image_name, image_id)
+ self.log(self.msg, "INFO")
+ elif self.partial_successful_activation:
+ self.msg = """"The requested image '{0}', with ID '{1}', has been partially activated on some devices in the Cisco
+ Catalyst Center.""".format(image_name, image_id)
+ self.log(self.msg, "INFO")
+ else:
+ self.msg = """The activation of the requested image '{0}', with ID '{1}', failed on devices in the Cisco
+ Catalyst Center.""".format(image_name, image_id)
+ self.log(self.msg, "INFO")
+
+ return self
+
+ def verify_diff_merged(self, config):
+ """
+        Verify the merged status (Importing/Tagging/Distributing/Activating) of the SWIM Image in devices in Cisco Catalyst Center.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ - config (dict): The configuration details to be verified.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the merged status of a configuration in Cisco Catalyst Center by retrieving the current state
+ (have) and desired state (want) of the configuration, logs the states, and validates whether the specified
+ SWIM operation performed or not.
+ """
+
+ self.get_have()
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ import_type = self.want.get("import_type")
+ if import_type:
+ self.verify_diff_imported(import_type).check_return_status()
+
+ tagged = self.want.get("tagging_details")
+ if tagged:
+ self.verify_diff_tagged().check_return_status()
- def get_diff(self):
- if self.want.get("tagging_details"):
- self.get_diff_tagging()
+ distribution_details = self.want.get("distribution_details")
+ if distribution_details:
+ self.verify_diff_distributed().check_return_status()
- if self.want.get("distribution_details"):
- self.get_diff_distribution()
+ activation_details = self.want.get("activation_details")
+ if activation_details:
+ self.verify_diff_activated().check_return_status()
- if self.want.get("activation_details"):
- self.get_diff_activation()
+ return self
def main():
""" main entry point for module execution
"""
- element_spec = dict(
- dnac_host=dict(required=True, type='str'),
- dnac_port=dict(type='str', default='443'),
- dnac_username=dict(type='str', default='admin', aliases=["user"]),
- dnac_password=dict(type='str', no_log=True),
- dnac_verify=dict(type='bool', default='True'),
- dnac_version=dict(type="str", default="2.2.3.3"),
- dnac_debug=dict(type='bool', default=False),
- dnac_log=dict(type='bool', default=False),
- config=dict(required=True, type='list', elements='dict'),
- validate_response_schema=dict(type="bool", default=True),
- )
+ element_spec = {'dnac_host': {'required': True, 'type': 'str'},
+ 'dnac_port': {'type': 'str', 'default': '443'},
+ 'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
+ 'dnac_password': {'type': 'str', 'no_log': True},
+ 'dnac_verify': {'type': 'bool', 'default': 'True'},
+ 'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
+ 'dnac_debug': {'type': 'bool', 'default': False},
+ 'dnac_log_level': {'type': 'str', 'default': 'WARNING'},
+ "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
+ "dnac_log_append": {"type": 'bool', "default": True},
+ 'dnac_log': {'type': 'bool', 'default': False},
+ 'validate_response_schema': {'type': 'bool', 'default': True},
+ 'config_verify': {'type': 'bool', "default": False},
+ 'dnac_api_task_timeout': {'type': 'int', "default": 1200},
+ 'dnac_task_poll_interval': {'type': 'int', "default": 2},
+ 'config': {'required': True, 'type': 'list', 'elements': 'dict'},
+ 'state': {'default': 'merged', 'choices': ['merged']}
+ }
module = AnsibleModule(argument_spec=element_spec,
supports_check_mode=False)
dnac_swims = DnacSwims(module)
- dnac_swims.validate_input()
- dnac_swims.get_want()
- dnac_swims.get_diff_import()
- dnac_swims.get_have()
- dnac_swims.get_diff()
+ state = dnac_swims.params.get("state")
+
+ if state not in dnac_swims.supported_states:
+ dnac_swims.status = "invalid"
+ dnac_swims.msg = "State {0} is invalid".format(state)
+ dnac_swims.check_return_status()
+
+ dnac_swims.validate_input().check_return_status()
+ config_verify = dnac_swims.params.get("config_verify")
+
+ for config in dnac_swims.validated_config:
+ dnac_swims.reset_values()
+ dnac_swims.get_want(config).check_return_status()
+ dnac_swims.get_diff_import().check_return_status()
+ dnac_swims.get_have().check_return_status()
+ dnac_swims.get_diff_state_apply[state](config).check_return_status()
+ if config_verify:
+ dnac_swims.verify_diff_state_apply[state](config).check_return_status()
module.exit_json(**dnac_swims.result)
diff --git a/ansible_collections/cisco/dnac/plugins/modules/swim_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/swim_workflow_manager.py
new file mode 100644
index 000000000..a147b4055
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/swim_workflow_manager.py
@@ -0,0 +1,1896 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2024, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ("Madhan Sankaranarayanan, Rishita Chowdhary, Abhishek Maheshwari")
+
+DOCUMENTATION = r"""
+---
+module: swim_workflow_manager
+short_description: workflow_manager module for SWIM related functions
+description:
+- Manage operation related to image importation, distribution, activation and tagging image as golden
+- API to fetch a software image from remote file system using URL for HTTP/FTP and upload it to Catalyst Center.
+ Supported image files extensions are bin, img, tar, smu, pie, aes, iso, ova, tar_gz and qcow2.
+- API to fetch a software image from local file system and upload it to Catalyst Center
+ Supported image files extensions are bin, img, tar, smu, pie, aes, iso, ova, tar_gz and qcow2.
+- API to tag/untag image as golden for a given family of devices
+- API to distribute a software image on a given device. Software image must be imported successfully into
+ Catalyst Center before it can be distributed.
+- API to activate a software image on a given device. Software image must be present in the device flash.
+version_added: '6.6.0'
+extends_documentation_fragment:
+ - cisco.dnac.workflow_manager_params
+author: Madhan Sankaranarayanan (@madhansansel)
+ Rishita Chowdhary (@rishitachowdhary)
+ Abhishek Maheshwari (@abmahesh)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Catalyst Center after module completion.
+ type: str
+ choices: [ merged ]
+ default: merged
+ config:
+ description: List of details of SWIM image being managed
+ type: list
+ elements: dict
+ required: True
+ suboptions:
+ import_image_details:
+ description: Details of image being imported
+ type: dict
+ suboptions:
+ type:
+ description: Specifies the import source, supporting local file import (local) or remote url import (remote).
+ type: str
+ local_image_details:
+ description: Details of the local path of the image to be imported.
+ type: dict
+ suboptions:
+ file_path:
+ description: Provide the absolute file path needed to import an image from your local system (Eg "/path/to/your/file").
+ Accepted files formats are - .gz,.bin,.img,.tar,.smu,.pie,.aes,.iso,.ova,.tar_gz,.qcow2,.nfvispkg,.zip,.spa,.rpm.
+ type: str
+ is_third_party:
+ description: Query parameter to determine if the image is from a third party (optional).
+ type: bool
+ third_party_application_type:
+ description: Specify the ThirdPartyApplicationType query parameter to indicate the type of third-party application. Allowed
+ values include WLC, LINUX, FIREWALL, WINDOWS, LOADBALANCER, THIRDPARTY, etc.(optional).
+ WLC (Wireless LAN Controller) - It's a network device that manages and controls multiple wireless access points (APs) in a
+ centralized manner.
+ LINUX - It's an open-source operating system that provides a complete set of software packages and utilities.
+ FIREWALL - It's a network security device that monitors and controls incoming and outgoing network traffic based on
+ predetermined security rules.It acts as a barrier between a trusted internal network and untrusted external networks
+ (such as the internet), preventing unauthorized access.
+ WINDOWS - It's an operating system known for its graphical user interface (GUI) support, extensive compatibility with hardware
+ and software, and widespread use across various applications.
+ LOADBALANCER - It's a network device or software application that distributes incoming network traffic across multiple servers
+ or resources.
+ THIRDPARTY - It refers to third-party images or applications that are not part of the core system.
+ NAM (Network Access Manager) - It's a network management tool or software application that provides centralized control and
+ monitoring of network access policies, user authentication, and device compliance.
+ WAN Optimization - It refers to techniques and technologies used to improve the performance and efficiency of WANs. It includes
+ various optimization techniques such as data compression, caching, protocol optimization, and traffic prioritization to reduce
+ latency, increase throughput, and improve user experience over WAN connections.
+ Unknown - It refers to an unspecified or unrecognized application type.
+ Router - It's a network device that forwards data packets between computer networks. They are essential for connecting multiple
+ networks together and directing traffic between them.
+ type: str
+ third_party_image_family:
+ description: Provide the ThirdPartyImageFamily query parameter to identify the family of the third-party image. Image Family name
+ like PALOALTO, RIVERBED, FORTINET, CHECKPOINT, SILVERPEAK etc. (optional).
+ type: str
+ third_party_vendor:
+ description: Include the ThirdPartyVendor query parameter to specify the vendor of the third party.
+ type: str
+ url_details:
+ description: URL details for SWIM import
+ type: dict
+ suboptions:
+ payload:
+ description: Swim Import Via Url's payload.
+ type: list
+ elements: dict
+ suboptions:
+ application_type:
+ description: An optional parameter that specifies the type of application. Allowed values include WLC, LINUX, FIREWALL, WINDOWS,
+ LOADBALANCER, THIRDPARTY, etc. This is only applicable for third-party image types(optional).
+ WLC (Wireless LAN Controller) - It's network device that manages and controls multiple wireless access points (APs) in a
+ centralized manner.
+ LINUX - It's an open source which provide complete operating system with a wide range of software packages and utilities.
+ FIREWALL - It's a network security device that monitors and controls incoming and outgoing network traffic based on
+ predetermined security rules.It acts as a barrier between a trusted internal network and untrusted external networks
+ (such as the internet), preventing unauthorized access.
+ WINDOWS - It's an OS which provides GUI support for various applications, and extensive compatibility with hardware
+ and software.
+ LOADBALANCER - It's a network device or software application that distributes incoming network traffic across multiple servers
+ or resources.
+ THIRDPARTY - It refers to third-party images or applications that are not part of the core system.
+ NAM (Network Access Manager) - It's a network management tool or software application that provides centralized control and
+ monitoring of network access policies, user authentication, and device compliance.
+ WAN Optimization - It refers to techniques and technologies used to improve the performance and efficiency of WANs. It includes
+ various optimization techniques such as data compression, caching, protocol optimization, and traffic prioritization to reduce
+ latency, increase throughput, and improve user experience over WAN connections.
+ Unknown - It refers to an unspecified or unrecognized application type.
+ Router - It's a network device that forwards data packets between computer networks. They are essential for connecting multiple
+ networks together and directing traffic between them.
+ type: str
+ image_family:
+ description: Represents the name of the image family and is applicable only when uploading third-party images. Image Family name
+ like PALOALTO, RIVERBED, FORTINET, CHECKPOINT, SILVERPEAK etc. (optional).
+ type: str
+ source_url:
+ description: A mandatory parameter for importing a SWIM image via a remote URL. This parameter is required when using a URL
+                to import an image. (For example, http://{host}/swim/cat9k_isoxe.16.12.10s.SPA.bin,
+ ftp://user:password@{host}/swim/cat9k_isoxe.16.12.10s.SPA.iso)
+ type: str
+ is_third_party:
+ description: Flag indicates whether the image is uploaded from a third party (optional).
+ type: bool
+ vendor:
+ description: The name of the vendor, that applies only to third-party image types when importing via URL (optional).
+ type: str
+ schedule_at:
+ description: ScheduleAt query parameter. Epoch Time (The number of milli-seconds since
+ January 1 1970 UTC) at which the distribution should be scheduled (optional).
+ type: str
+ schedule_desc:
+ description: ScheduleDesc query parameter. Custom Description (optional).
+ type: str
+ schedule_origin:
+ description: ScheduleOrigin query parameter. Originator of this call (optional).
+ type: str
+ tagging_details:
+ description: Details for tagging or untagging an image as golden
+ type: dict
+ suboptions:
+ image_name:
+ description: SWIM image name which will be tagged or untagged as golden.
+ type: str
+ device_role:
+ description: Defines the device role, with permissible values including ALL, UNKNOWN, ACCESS, BORDER ROUTER,
+ DISTRIBUTION, and CORE.
+ ALL - This role typically represents all devices within the network, regardless of their specific roles or functions.
+          UNKNOWN - This role is assigned to devices whose roles or functions have not been identified or classified within Cisco Catalyst Center.
+ This could happen if the platform is unable to determine the device's role based on available information.
+ ACCESS - This role typically represents switches or access points that serve as access points for end-user devices to connect to the network.
+ These devices are often located at the edge of the network and provide connectivity to end-user devices.
+ BORDER ROUTER - These are devices that connect different network domains or segments together. They often serve as
+ gateways between different networks, such as connecting an enterprise network to the internet or connecting
+ multiple branch offices.
+ DISTRIBUTION - This role represents function as distribution switches or routers in hierarchical network designs. They aggregate traffic
+ from access switches and route it toward the core of the network or toward other distribution switches.
+ CORE - This role typically represents high-capacity switches or routers that form the backbone of the network. They handle large volumes
+ of traffic and provide connectivity between different parts of network, such as connecting distribution switches or
+ providing interconnection between different network segments.
+ type: str
+ device_image_family_name:
+ description: Device Image family name(Eg Cisco Catalyst 9300 Switch)
+ type: str
+ site_name:
+ description: Site name for which SWIM image will be tagged/untagged as golden.
+ If not provided, SWIM image will be mapped to global site.
+ type: str
+ tagging:
+        description: Boolean value to tag/untag SWIM image as golden
+ If True then the given image will be tagged as golden.
+ If False then the given image will be un-tagged as golden.
+ type: bool
+ image_distribution_details:
+    description: Details for SWIM image distribution. Device on which the image needs to be distributed
+      can be specified using any of the following parameters - deviceSerialNumber,
+ deviceIPAddress, deviceHostname or deviceMacAddress.
+ type: dict
+ suboptions:
+ device_role:
+ description: Device Role and permissible Values are ALL, UNKNOWN, ACCESS, BORDER ROUTER,
+ DISTRIBUTION and CORE.
+ ALL - This role typically represents all devices within the network, regardless of their specific roles or functions.
+          UNKNOWN - This role is assigned to devices whose roles or functions have not been identified or classified within Cisco Catalyst Center.
+ This could happen if the platform is unable to determine the device's role based on available information.
+ ACCESS - This role typically represents switches or access points that serve as access points for end-user devices to connect to the network.
+ These devices are often located at the edge of the network and provide connectivity to end-user devices.
+ BORDER ROUTER - These are devices that connect different network domains or segments together. They often serve as
+ gateways between different networks, such as connecting an enterprise network to the internet or connecting
+ multiple branch offices.
+ DISTRIBUTION - This role represents function as distribution switches or routers in hierarchical network designs. They aggregate traffic
+ from access switches and route it toward the core of the network or toward other distribution switches.
+ CORE - This role typically represents high-capacity switches or routers that form the backbone of the network. They handle large volumes
+ of traffic and provide connectivity between different parts of network, such as connecting distribution switches or
+ providing interconnection between different network segments.
+ type: str
+ device_family_name:
+ description: Specify the name of the device family such as Switches and Hubs, etc.
+ type: str
+ site_name:
+ description: Used to get device details associated to this site.
+ type: str
+ device_series_name:
+ description: This parameter specifies the name of the device series. It is used to identify a specific series of devices,
+ such as Cisco Catalyst 9300 Series Switches, within the Cisco Catalyst Center.
+ type: str
+ version_added: 6.12.0
+ image_name:
+ description: SWIM image's name
+ type: str
+ device_serial_number:
+ description: Device serial number where the image needs to be distributed
+ type: str
+ device_ip_address:
+ description: Device IP address where the image needs to be distributed
+ type: str
+ device_hostname:
+ description: Device hostname where the image needs to be distributed
+ type: str
+ device_mac_address:
+ description: Device MAC address where the image needs to be distributed
+ type: str
+ image_activation_details:
+    description: Details for SWIM image activation. Device on which the image needs to be activated
+      can be specified using any of the following parameters - deviceSerialNumber,
+ deviceIPAddress, deviceHostname or deviceMacAddress.
+ type: dict
+ suboptions:
+ device_role:
+ description: Defines the device role, with permissible values including ALL, UNKNOWN, ACCESS, BORDER ROUTER,
+ DISTRIBUTION, and CORE.
+ type: str
+ device_family_name:
+ description: Specify the name of the device family such as Switches and Hubs, etc.
+ type: str
+ site_name:
+ description: Used to get device details associated to this site.
+ type: str
+ device_series_name:
+ description: This parameter specifies the name of the device series. It is used to identify a specific series of devices,
+ such as Cisco Catalyst 9300 Series Switches, within the Cisco Catalyst Center.
+ type: str
+ version_added: 6.12.0
+ activate_lower_image_version:
+ description: ActivateLowerImageVersion flag.
+ type: bool
+ device_upgrade_mode:
+ description: It specifies the mode of upgrade to be applied to the devices having the following values - 'install', 'bundle', and 'currentlyExists'.
+ install - This mode instructs Cisco Catalyst Center to perform a clean installation of the new image on the target devices.
+ When this mode is selected, the existing image on the device is completely replaced with the new image during the upgrade process.
+ This ensures that the device runs only the new image version after the upgrade is completed.
+ bundle - This mode instructs Cisco Catalyst Center bundles the new image with the existing image on the device before initiating
+ the upgrade process. This mode allows for a more efficient upgrade process by preserving the existing image on the device while
+ adding the new image as an additional bundle. After the upgrade, the device can run either the existing image or the new bundled
+ image, depending on the configuration.
+ currentlyExists - This mode instructs Cisco Catalyst Center to checks if the target devices already have the desired image version
+ installed. If image already present on devices, no action is taken and upgrade process is skipped for those devices. This mode
+ is useful for avoiding unnecessary upgrades on devices that already have the correct image version installed, thereby saving time.
+ type: str
+ distribute_if_needed:
+ description: Enable the distribute_if_needed option when activating the SWIM image.
+ type: bool
+ image_name:
+ description: SWIM image's name
+ type: str
+ device_serial_number:
+ description: Device serial number where the image needs to be activated
+ type: str
+ device_ip_address:
+ description: Device IP address where the image needs to be activated
+ type: str
+ device_hostname:
+ description: Device hostname where the image needs to be activated
+ type: str
+ device_mac_address:
+ description: Device MAC address where the image needs to be activated
+ type: str
+ schedule_validate:
+ description: ScheduleValidate query parameter. ScheduleValidate, validates data
+ before schedule (optional).
+ type: bool
+requirements:
+- dnacentersdk == 2.4.5
+- python >= 3.5
+notes:
+ - SDK Method used are
+ software_image_management_swim.SoftwareImageManagementSwim.import_software_image_via_url,
+ software_image_management_swim.SoftwareImageManagementSwim.tag_as_golden_image,
+ software_image_management_swim.SoftwareImageManagementSwim.trigger_software_image_distribution,
+ software_image_management_swim.SoftwareImageManagementSwim.trigger_software_image_activation,
+
+ - Paths used are
+ post /dna/intent/api/v1/image/importation/source/url,
+ post /dna/intent/api/v1/image/importation/golden,
+ post /dna/intent/api/v1/image/distribution,
+ post /dna/intent/api/v1/image/activation/device,
+
+ - Added the parameter 'dnac_api_task_timeout', 'dnac_task_poll_interval' options in v6.13.2.
+
+"""
+
+EXAMPLES = r"""
+- name: Import an image from a URL, tag it as golden and load it on device
+ cisco.dnac.swim_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ config:
+ - import_image_details:
+ type: remote
+ url_details:
+ payload:
+ - source_url: "http://10.10.10.10/stda/cat9k_iosxe.17.12.01.SPA.bin"
+ is_third_party: False
+ tagging_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ device_role: ACCESS
+ device_image_family_name: Cisco Catalyst 9300 Switch
+ site_name: Global/USA/San Francisco/BGL_18
+ tagging: True
+ image_distribution_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ device_serial_number: FJC2327U0S2
+ image_activation_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ schedule_validate: False
+ activate_lower_image_version: False
+ distribute_if_needed: True
+ device_serial_number: FJC2327U0S2
+
+- name: Import an image from local, tag it as golden.
+ cisco.dnac.swim_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ config:
+ - import_image_details:
+ type: local
+ local_image_details:
+ file_path: /Users/Downloads/cat9k_iosxe.17.12.01.SPA.bin
+ is_third_party: False
+ tagging_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ device_role: ACCESS
+ device_image_family_name: Cisco Catalyst 9300 Switch
+ site_name: Global/USA/San Francisco/BGL_18
+ tagging: True
+
+- name: Tag the given image as golden and load it on device
+ cisco.dnac.swim_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ config:
+ - tagging_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ device_role: ACCESS
+ device_image_family_name: Cisco Catalyst 9300 Switch
+ site_name: Global/USA/San Francisco/BGL_18
+ tagging: True
+
+- name: Un-tagged the given image as golden and load it on device
+ cisco.dnac.swim_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ config:
+ - tagging_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ device_role: ACCESS
+ device_image_family_name: Cisco Catalyst 9300 Switch
+ site_name: Global/USA/San Francisco/BGL_18
+ tagging: False
+
+- name: Distribute the given image on devices associated to that site with specified role.
+ cisco.dnac.swim_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ config:
+ - image_distribution_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ site_name: Global/USA/San Francisco/BGL_18
+ device_role: ALL
+ device_family_name: Switches and Hubs
+ device_series_name: Cisco Catalyst 9300 Series Switches
+
+- name: Activate the given image on devices associated to that site with specified role.
+ cisco.dnac.swim_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: True
+ config:
+ - image_activation_details:
+ image_name: cat9k_iosxe.17.12.01.SPA.bin
+ site_name: Global/USA/San Francisco/BGL_18
+ device_role: ALL
+ device_family_name: Switches and Hubs
+ device_series_name: Cisco Catalyst 9300 Series Switches
+ scehdule_validate: False
+ activate_lower_image_version: True
+ distribute_if_needed: True
+
+"""
+
+RETURN = r"""
+#Case: SWIM image is successfully imported, tagged as golden, distributed and activated on a device
+response:
+ description: A dictionary with activation details as returned by the Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "additionalStatusURL": String,
+ "data": String,
+ "endTime": 0,
+ "id": String,
+ "instanceTenantId": String,
+ "isError": bool,
+ "lastUpdate": 0,
+ "progress": String,
+ "rootId": String,
+ "serviceType": String,
+ "startTime": 0,
+ "version": 0
+ },
+ "msg": String
+ }
+
+"""
+
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts,
+ get_dict_result,
+)
+from ansible.module_utils.basic import AnsibleModule
+import os
+import time
+
+
+class Swim(DnacBase):
+ """Class containing member attributes for Swim workflow_manager module"""
+
+    def __init__(self, module):
+        """Initialize the SWIM workflow manager with the given AnsibleModule instance."""
+        super().__init__(module)
+        # Only the 'merged' (add/update) state is implemented by this module.
+        self.supported_states = ["merged"]
+
+ def validate_input(self):
+ """
+ Validate the fields provided in the playbook.
+ Checks the configuration provided in the playbook against a predefined specification
+ to ensure it adheres to the expected structure and data types.
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call 'validate_input' on it.
+ If the validation succeeds, 'self.status' will be 'success' and 'self.validated_config'
+ will contain the validated configuration. If it fails, 'self.status' will be 'failed',
+ 'self.msg' will describe the validation issues.
+ """
+
+ if not self.config:
+ self.status = "success"
+ self.msg = "Configuration is not available in the playbook for validation"
+ self.log(self.msg, "ERROR")
+ return self
+
+ temp_spec = dict(
+ import_image_details=dict(type='dict'),
+ tagging_details=dict(type='dict'),
+ image_distribution_details=dict(type='dict'),
+ image_activation_details=dict(type='dict'),
+ )
+
+ # Validate swim params
+ valid_temp, invalid_params = validate_list_of_dicts(
+ self.config, temp_spec
+ )
+
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format(invalid_params)
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_temp
+ self.msg = "Successfully validated playbook config params: {0}".format(str(valid_temp))
+ self.log(self.msg, "INFO")
+ self.status = "success"
+
+ return self
+
+ def site_exists(self, site_name):
+ """
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ tuple: A tuple containing two values:
+ - site_exists (bool): A boolean indicating whether the site exists (True) or not (False).
+ - site_id (str or None): The ID of the site if it exists, or None if the site is not found.
+ Description:
+ This method checks the existence of a site in the Catalyst Center. If the site is found,it sets 'site_exists' to True,
+ retrieves the site's ID, and returns both values in a tuple. If the site does not exist, 'site_exists' is set
+ to False, and 'site_id' is None. If an exception occurs during the site lookup, an exception is raised.
+ """
+
+ site_exists = False
+ site_id = None
+ response = None
+ try:
+ response = self.dnac._exec(
+ family="sites",
+ function='get_site',
+ params={"name": site_name},
+ )
+ except Exception as e:
+ self.msg = "An exception occurred: Site '{0}' does not exist in the Cisco Catalyst Center".format(site_name)
+ self.log(self.msg, "ERROR")
+ self.module.fail_json(msg=self.msg)
+
+ if response:
+ self.log("Received API response from 'get_site': {0}".format(str(response)), "DEBUG")
+ site = response.get("response")
+ site_id = site[0].get("id")
+ site_exists = True
+
+ return (site_exists, site_id)
+
+ def get_image_id(self, name):
+ """
+ Retrieve the unique image ID based on the provided image name.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ name (str): The name of the software image to search for.
+ Returns:
+ str: The unique image ID (UUID) corresponding to the given image name.
+ Raises:
+ AnsibleFailJson: If the image is not found in the response.
+ Description:
+ This function sends a request to Cisco Catalyst Center to retrieve details about a software image based on its name.
+ It extracts and returns the image ID if a single matching image is found. If no image or multiple
+ images are found with the same name, it raises an exception.
+ """
+
+ image_response = self.dnac._exec(
+ family="software_image_management_swim",
+ function='get_software_image_details',
+ params={"image_name": name},
+ )
+ self.log("Received API response from 'get_software_image_details': {0}".format(str(image_response)), "DEBUG")
+ image_list = image_response.get("response")
+
+ if (len(image_list) == 1):
+ image_id = image_list[0].get("imageUuid")
+ self.log("SWIM image '{0}' has the ID: {1}".format(name, image_id), "INFO")
+ else:
+ error_message = "SWIM image '{0}' could not be found".format(name)
+ self.log(error_message, "ERROR")
+ self.module.fail_json(msg=error_message, response=image_response)
+
+ return image_id
+
+ def get_image_name_from_id(self, image_id):
+ """
+ Retrieve the unique image name based on the provided image id.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ id (str): The unique image ID (UUID) of the software image to search for.
+ Returns:
+ str: The image name corresponding to the given unique image ID (UUID)
+ Raises:
+ AnsibleFailJson: If the image is not found in the response.
+ Description:
+ This function sends a request to Cisco Catalyst Center to retrieve details about a software image based on its id.
+ It extracts and returns the image name if a single matching image is found. If no image or multiple
+ images are found with the same name, it raises an exception.
+ """
+
+ image_response = self.dnac._exec(
+ family="software_image_management_swim",
+ function='get_software_image_details',
+ params={"image_uuid": image_id},
+ )
+ self.log("Received API response from 'get_software_image_details': {0}".format(str(image_response)), "DEBUG")
+ image_list = image_response.get("response")
+
+ if (len(image_list) == 1):
+ image_name = image_list[0].get("name")
+ self.log("SWIM image '{0}' has been fetched successfully from Cisco Catalyst Center".format(image_name), "INFO")
+ else:
+ error_message = "SWIM image with Id '{0}' could not be found in Cisco Catalyst Center".format(image_id)
+ self.log(error_message, "ERROR")
+ self.module.fail_json(msg=error_message, response=image_response)
+
+ return image_name
+
+ def is_image_exist(self, name):
+ """
+ Retrieve the unique image ID based on the provided image name.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ name (str): The name of the software image to search for.
+ Returns:
+ str: The unique image ID (UUID) corresponding to the given image name.
+ Raises:
+ AnsibleFailJson: If the image is not found in the response.
+ Description:
+ This function sends a request to Cisco Catalyst Center to retrieve details about a software image based on its name.
+ It extracts and returns the image ID if a single matching image is found. If no image or multiple
+ images are found with the same name, it raises an exception.
+ """
+
+ image_exist = False
+ image_response = self.dnac._exec(
+ family="software_image_management_swim",
+ function='get_software_image_details',
+ params={"image_name": name},
+ )
+ self.log("Received API response from 'get_software_image_details': {0}".format(str(image_response)), "DEBUG")
+ image_list = image_response.get("response")
+
+ if (len(image_list) == 1):
+ image_exist = True
+
+ return image_exist
+
+ def get_device_id(self, params):
+ """
+ Retrieve the unique device ID based on the provided parameters.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ params (dict): A dictionary containing parameters to filter devices.
+ Returns:
+ str: The unique device ID corresponding to the filtered device.
+ Description:
+ This function sends a request to Cisco Catalyst Center to retrieve a list of devices based on the provided
+ filtering parameters. If a single matching device is found, it extracts and returns the device ID. If
+ no device or multiple devices match the criteria, it raises an exception.
+ """
+ device_id = None
+ response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ params=params,
+ )
+ self.log("Received API response from 'get_device_list': {0}".format(str(response)), "DEBUG")
+
+ device_list = response.get("response")
+ if (len(device_list) == 1):
+ device_id = device_list[0].get("id")
+ self.log("Device Id: {0}".format(str(device_id)), "INFO")
+ else:
+ self.msg = "Device with params: '{0}' not found in Cisco Catalyst Center so can't fetch the device id".format(str(params))
+ self.log(self.msg, "WARNING")
+
+ return device_id
+
+ def get_device_uuids(self, site_name, device_family, device_role, device_series_name=None):
+ """
+ Retrieve a list of device UUIDs based on the specified criteria.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ site_name (str): The name of the site for which device UUIDs are requested.
+ device_family (str): The family/type of devices to filter on.
+ device_role (str): The role of devices to filter on. If None, 'ALL' roles are considered.
+ device_series_name(str): Specifies the name of the device series.
+ Returns:
+ list: A list of device UUIDs that match the specified criteria.
+ Description:
+ The function checks the reachability status and role of devices in the given site.
+ Only devices with "Reachable" status are considered, and filtering is based on the specified
+ device family and role (if provided).
+ """
+
+ device_uuid_list = []
+ if not site_name:
+ site_name = "Global"
+ self.log("Site name not specified; defaulting to 'Global' to fetch all devices under this category", "INFO")
+
+ (site_exists, site_id) = self.site_exists(site_name)
+ if not site_exists:
+ self.log("""Site '{0}' is not found in the Cisco Catalyst Center, hence unable to fetch associated
+ devices.""".format(site_name), "INFO")
+ return device_uuid_list
+
+ if device_series_name:
+ if device_series_name.startswith(".*") and device_series_name.endswith(".*"):
+ self.log("Device series name '{0}' is already in the regex format".format(device_series_name), "INFO")
+ else:
+ device_series_name = ".*" + device_series_name + ".*"
+
+ site_params = {
+ "site_id": site_id,
+ "device_family": device_family
+ }
+ response = self.dnac._exec(
+ family="sites",
+ function='get_membership',
+ op_modifies=True,
+ params=site_params,
+ )
+ self.log("Received API response from 'get_membership': {0}".format(str(response)), "DEBUG")
+ response = response['device']
+
+ site_response_list = []
+ for item in response:
+ if item['response']:
+ for item_dict in item['response']:
+ site_response_list.append(item_dict)
+
+ if device_role.upper() == 'ALL':
+ device_role = None
+
+ device_params = {
+ 'series': device_series_name,
+ 'family': device_family,
+ 'role': device_role
+ }
+ device_list_response = self.dnac._exec(
+ family="devices",
+ function='get_device_list',
+ op_modifies=True,
+ params=device_params,
+ )
+
+ device_response = device_list_response.get('response')
+ if not response or not device_response:
+ self.log("Failed to retrieve devices associated with the site '{0}' due to empty API response.".format(site_name), "INFO")
+ return device_uuid_list
+
+ site_memberships_ids, device_response_ids = [], []
+
+ for item in site_response_list:
+ if item["reachabilityStatus"] != "Reachable":
+ self.log("""Device '{0}' is currently '{1}' and cannot be included in the SWIM distribution/activation
+ process.""".format(item["managementIpAddress"], item["reachabilityStatus"]), "INFO")
+ continue
+ self.log("""Device '{0}' from site '{1}' is ready for the SWIM distribution/activation
+ process.""".format(item["managementIpAddress"], site_name), "INFO")
+ site_memberships_ids.append(item["instanceUuid"])
+
+ for item in device_response:
+ if item["reachabilityStatus"] != "Reachable":
+ self.log("""Unable to proceed with the device '{0}' for SWIM distribution/activation as its status is
+ '{1}'.""".format(item["managementIpAddress"], item["reachabilityStatus"]), "INFO")
+ continue
+ self.log("""Device '{0}' matches to the specified filter requirements and is set for SWIM
+ distribution/activation.""".format(item["managementIpAddress"]), "INFO")
+ device_response_ids.append(item["instanceUuid"])
+
+ # Find the intersection of device IDs with the response get from get_membership api and get_device_list api with provided filters
+ device_uuid_list = set(site_memberships_ids).intersection(set(device_response_ids))
+
+ return device_uuid_list
+
+ def get_device_family_identifier(self, family_name):
+ """
+ Retrieve and store the device family identifier based on the provided family name.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ family_name (str): The name of the device family for which to retrieve the identifier.
+ Returns:
+ None
+ Raises:
+ AnsibleFailJson: If the family name is not found in the response.
+ Description:
+ This function sends a request to Cisco Catalyst Center to retrieve a list of device family identifiers.It then
+ searches for a specific family name within the response and stores its associated identifier. If the family
+ name is found, the identifier is stored; otherwise, an exception is raised.
+ """
+
+ have = {}
+ response = self.dnac._exec(
+ family="software_image_management_swim",
+ function='get_device_family_identifiers',
+ )
+ self.log("Received API response from 'get_device_family_identifiers': {0}".format(str(response)), "DEBUG")
+ device_family_db = response.get("response")
+
+ if device_family_db:
+ device_family_details = get_dict_result(device_family_db, 'deviceFamily', family_name)
+
+ if device_family_details:
+ device_family_identifier = device_family_details.get("deviceFamilyIdentifier")
+ have["device_family_identifier"] = device_family_identifier
+ self.log("Family device indentifier: {0}".format(str(device_family_identifier)), "INFO")
+ else:
+ self.msg = "Device Family: {0} not found".format(str(family_name))
+ self.log(self.msg, "ERROR")
+ self.module.fail_json(msg=self.msg, response=self.msg)
+ self.have.update(have)
+
+    def get_have(self):
+        """
+        Retrieve and store various software image and device details based on user-provided information.
+        Parameters:
+            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+        Returns:
+            self: The current instance of the class with updated 'have' attributes.
+        Raises:
+            AnsibleFailJson: If required image or device details are not provided.
+        Description:
+            This function populates the 'have' dictionary with details related to software images, site information,
+            device families, distribution devices, and activation devices based on user-provided data in the 'want' dictionary.
+            It validates and retrieves the necessary information from Cisco Catalyst Center to support later actions.
+        """
+
+        # --- Tagging: resolve the image to tag, the target site and the device family ---
+        if self.want.get("tagging_details"):
+            have = {}
+            tagging_details = self.want.get("tagging_details")
+            if tagging_details.get("image_name"):
+                name = tagging_details.get("image_name").split("/")[-1]
+                image_id = self.get_image_id(name)
+                have["tagging_image_id"] = image_id
+
+            elif self.have.get("imported_image_id"):
+                # Fall back to an image imported earlier in this run.
+                have["tagging_image_id"] = self.have.get("imported_image_id")
+
+            else:
+                self.log("Image details for tagging not provided", "CRITICAL")
+                self.module.fail_json(msg="Image details for tagging not provided", response=[])
+
+            # check if given site exists, store siteid
+            # if not then use global site
+            site_name = tagging_details.get("site_name")
+            if site_name:
+                site_exists = False
+                (site_exists, site_id) = self.site_exists(site_name)
+                if site_exists:
+                    have["site_id"] = site_id
+                    self.log("Site {0} exists having the site id: {1}".format(site_name, str(site_id)), "DEBUG")
+            else:
+                # For global site, use -1 as siteId
+                have["site_id"] = "-1"
+                self.log("Site Name not given by user. Using global site.", "WARNING")
+
+            self.have.update(have)
+            # check if given device family name exists, store indentifier value
+            family_name = tagging_details.get("device_image_family_name")
+            self.get_device_family_identifier(family_name)
+
+        # --- Distribution: resolve site, image and the (optional) single target device ---
+        if self.want.get("distribution_details"):
+            have = {}
+            distribution_details = self.want.get("distribution_details")
+            site_name = distribution_details.get("site_name")
+            if site_name:
+                site_exists = False
+                (site_exists, site_id) = self.site_exists(site_name)
+
+                if site_exists:
+                    have["site_id"] = site_id
+                    self.log("Site '{0}' exists and has the site ID: {1}".format(site_name, str(site_id)), "DEBUG")
+
+            # check if image for distributon is available
+            if distribution_details.get("image_name"):
+                name = distribution_details.get("image_name").split("/")[-1]
+                image_id = self.get_image_id(name)
+                have["distribution_image_id"] = image_id
+
+            elif self.have.get("imported_image_id"):
+                # Fall back to an image imported earlier in this run.
+                have["distribution_image_id"] = self.have.get("imported_image_id")
+
+            else:
+                self.log("Image details required for distribution have not been provided", "ERROR")
+                self.module.fail_json(msg="Image details required for distribution have not been provided", response=[])
+
+            device_params = dict(
+                hostname=distribution_details.get("device_hostname"),
+                serialNumber=distribution_details.get("device_serial_number"),
+                managementIpAddress=distribution_details.get("device_ip_address"),
+                macAddress=distribution_details.get("device_mac_address"),
+            )
+            device_id = self.get_device_id(device_params)
+
+            # device_id is None when no single device matched; distribution may still
+            # proceed site-wide, so this is not an error here.
+            if device_id is not None:
+                have["distribution_device_id"] = device_id
+
+            self.have.update(have)
+
+        # --- Activation: resolve image, site and the (optional) single target device ---
+        if self.want.get("activation_details"):
+            have = {}
+            activation_details = self.want.get("activation_details")
+            # check if image for activation is available
+            if activation_details.get("image_name"):
+                name = activation_details.get("image_name").split("/")[-1]
+                image_id = self.get_image_id(name)
+                have["activation_image_id"] = image_id
+
+            elif self.have.get("imported_image_id"):
+                have["activation_image_id"] = self.have.get("imported_image_id")
+            else:
+                self.log("Image details required for activation have not been provided", "ERROR")
+                self.module.fail_json(msg="Image details required for activation have not been provided", response=[])
+
+            site_name = activation_details.get("site_name")
+            if site_name:
+                site_exists = False
+                (site_exists, site_id) = self.site_exists(site_name)
+                if site_exists:
+                    have["site_id"] = site_id
+                    self.log("The site '{0}' exists and has the site ID '{1}'".format(site_name, str(site_id)), "INFO")
+
+            device_params = dict(
+                hostname=activation_details.get("device_hostname"),
+                serialNumber=activation_details.get("device_serial_number"),
+                managementIpAddress=activation_details.get("device_ip_address"),
+                macAddress=activation_details.get("device_mac_address"),
+            )
+            device_id = self.get_device_id(device_params)
+
+            if device_id is not None:
+                have["activation_device_id"] = device_id
+            self.have.update(have)
+        self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+
+        return self
+
+ def get_want(self, config):
+ """
+ Retrieve and store import, tagging, distribution, and activation details from playbook configuration.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): The configuration dictionary containing image import and other details.
+ Returns:
+ self: The current instance of the class with updated 'want' attributes.
+ Raises:
+ AnsibleFailJson: If an incorrect import type is specified.
+ Description:
+ This function parses the playbook configuration to extract information related to image
+ import, tagging, distribution, and activation. It stores these details in the 'want' dictionary
+ for later use in the Ansible module.
+ """
+
+ want = {}
+ if config.get("import_image_details"):
+ want["import_image"] = True
+ want["import_type"] = config.get("import_image_details").get("type").lower()
+ if want["import_type"] == "remote":
+ want["url_import_details"] = config.get("import_image_details").get("url_details")
+ elif want["import_type"] == "local":
+ want["local_import_details"] = config.get("import_image_details").get("local_image_details")
+ else:
+ self.log("The import type '{0}' provided is incorrect. Only 'local' or 'remote' are supported.".format(want["import_type"]), "CRITICAL")
+ self.module.fail_json(msg="Incorrect import type. Supported Values: local or remote")
+
+ want["tagging_details"] = config.get("tagging_details")
+ want["distribution_details"] = config.get("image_distribution_details")
+ want["activation_details"] = config.get("image_activation_details")
+
+ self.want = want
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ return self
+
+    def get_diff_import(self):
+        """
+        Check the image import type and fetch the image ID for the imported image for further use.
+        Parameters:
+            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+        Returns:
+            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+        Description:
+            This function checks the type of image import (URL or local) and proceeds with the import operation accordingly.
+            It then monitors the import task's progress and updates the 'result' dictionary. If the operation is successful,
+            'changed' is set to True.
+            Additionally, if tagging, distribution, or activation details are provided, it fetches the image ID for the
+            imported image and stores it in the 'have' dictionary for later use.
+        """
+
+        try:
+            import_type = self.want.get("import_type")
+
+            # No import details at all: report success with a warning message, no change.
+            if not import_type:
+                self.status = "success"
+                self.msg = "Error: Details required for importing SWIM image. Please provide the necessary information."
+                self.result['msg'] = self.msg
+                self.log(self.msg, "WARNING")
+                self.result['changed'] = False
+                return self
+
+            if import_type == "remote":
+                image_name = self.want.get("url_import_details").get("payload")[0].get("source_url")
+            else:
+                image_name = self.want.get("local_import_details").get("file_path")
+
+            # Code to check if the image already exists in Catalyst Center
+            name = image_name.split('/')[-1]
+            image_exist = self.is_image_exist(name)
+
+            # Maps playbook snake_case keys to the API's camelCase payload keys.
+            import_key_mapping = {
+                'source_url': 'sourceURL',
+                'image_family': 'imageFamily',
+                'application_type': 'applicationType',
+                'is_third_party': 'thirdParty',
+            }
+
+            # Idempotency: reuse the existing image instead of re-importing it.
+            if image_exist:
+                image_id = self.get_image_id(name)
+                self.have["imported_image_id"] = image_id
+                self.msg = "Image '{0}' already exists in the Cisco Catalyst Center".format(name)
+                self.result['msg'] = self.msg
+                self.log(self.msg, "INFO")
+                self.status = "success"
+                self.result['changed'] = False
+                return self
+
+            if self.want.get("import_type") == "remote":
+                # Translate only the known playbook keys into the API payload shape.
+                import_payload_dict = {}
+                temp_payload = self.want.get("url_import_details").get("payload")[0]
+                keys_to_change = list(import_key_mapping.keys())
+
+                for key, val in temp_payload.items():
+                    if key in keys_to_change:
+                        api_key_name = import_key_mapping[key]
+                        import_payload_dict[api_key_name] = val
+
+                import_image_payload = [import_payload_dict]
+                import_params = dict(
+                    payload=import_image_payload,
+                    scheduleAt=self.want.get("url_import_details").get("schedule_at"),
+                    scheduleDesc=self.want.get("url_import_details").get("schedule_desc"),
+                    scheduleOrigin=self.want.get("url_import_details").get("schedule_origin"),
+                )
+                import_function = 'import_software_image_via_url'
+            else:
+                file_path = self.want.get("local_import_details").get("file_path")
+                import_params = dict(
+                    is_third_party=self.want.get("local_import_details").get("is_third_party"),
+                    third_party_vendor=self.want.get("local_import_details").get("third_party_vendor"),
+                    third_party_image_family=self.want.get("local_import_details").get("third_party_image_family"),
+                    third_party_application_type=self.want.get("local_import_details").get("third_party_application_type"),
+                    # NOTE(review): this file handle is handed to the SDK and never explicitly
+                    # closed here — presumably released after the upload completes; confirm.
+                    multipart_fields={'file': (os.path.basename(file_path), open(file_path, 'rb'), 'application/octet-stream')},
+                    multipart_monitor_callback=None
+                )
+                import_function = 'import_local_software_image'
+
+            response = self.dnac._exec(
+                family="software_image_management_swim",
+                function=import_function,
+                op_modifies=True,
+                params=import_params,
+            )
+            self.log("Received API response from {0}: {1}".format(import_function, str(response)), "DEBUG")
+
+            task_details = {}
+            task_id = response.get("response").get("taskId")
+
+            # Poll the import task until it completes or reports an error.
+            # NOTE(review): no timeout bound on this loop; relies on get_task_details pacing.
+            while (True):
+                task_details = self.get_task_details(task_id)
+                name = image_name.split('/')[-1]
+
+                if task_details and \
+                        ("completed successfully" in task_details.get("progress").lower()):
+                    self.result['changed'] = True
+                    self.status = "success"
+                    self.msg = "Swim Image {0} imported successfully".format(name)
+                    self.result['msg'] = self.msg
+                    self.log(self.msg, "INFO")
+                    break
+
+                if task_details and task_details.get("isError"):
+                    # A concurrent/previous import of the same image is treated as success.
+                    if "already exists" in task_details.get("failureReason", ""):
+                        self.msg = "SWIM Image {0} already exists in the Cisco Catalyst Center".format(name)
+                        self.result['msg'] = self.msg
+                        self.log(self.msg, "INFO")
+                        self.status = "success"
+                        self.result['changed'] = False
+                        break
+                    else:
+                        self.status = "failed"
+                        self.msg = task_details.get("failureReason", "SWIM Image {0} seems to be invalid".format(image_name))
+                        self.log(self.msg, "WARNING")
+                        self.result['response'] = self.msg
+                        return self
+
+            self.result['response'] = task_details if task_details else response
+
+            # Fetch image_id for the imported image for further use
+            image_name = image_name.split('/')[-1]
+            image_id = self.get_image_id(image_name)
+            self.have["imported_image_id"] = image_id
+
+            return self
+
+        except Exception as e:
+            # NOTE(review): this broad catch reports every failure above (including
+            # unexpected KeyError/AttributeError bugs) as a missing-details error;
+            # confirm this masking is intended.
+            self.status = "failed"
+            self.msg = """Error: Import image details are not provided in the playbook, or the Import Image API was not
+                triggered successfully. Please ensure the necessary details are provided and verify the status of the Import Image process."""
+            self.log(self.msg, "ERROR")
+            self.result['response'] = self.msg
+
+            return self
+
+    def get_diff_tagging(self):
+        """
+        Tag or untag a software image as golden based on provided tagging details.
+        Parameters:
+            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+        Returns:
+            self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+        Description:
+            This function tags or untags a software image as a golden image in Cisco Catalyst Center based on the provided
+            tagging details. The tagging action is determined by the value of the 'tagging' attribute
+            in the 'tagging_details' dictionary.If 'tagging' is True, the image is tagged as golden, and if 'tagging'
+            is False, the golden tag is removed. The function sends the appropriate request to Cisco Catalyst Center and updates the
+            task details in the 'result' dictionary. If the operation is successful, 'changed' is set to True.
+        """
+
+        tagging_details = self.want.get("tagging_details")
+        tag_image_golden = tagging_details.get("tagging")
+        image_name = self.get_image_name_from_id(self.have.get("tagging_image_id"))
+
+        # Parameters (snake_case) for the status query; the tag/untag calls below
+        # use the camelCase variants expected by those endpoints.
+        image_params = dict(
+            image_id=self.have.get("tagging_image_id"),
+            site_id=self.have.get("site_id"),
+            device_family_identifier=self.have.get("device_family_identifier"),
+            device_role=tagging_details.get("device_role", "ALL").upper()
+        )
+
+        response = self.dnac._exec(
+            family="software_image_management_swim",
+            function='get_golden_tag_status_of_an_image',
+            op_modifies=True,
+            params=image_params
+        )
+        self.log("Received API response from 'get_golden_tag_status_of_an_image': {0}".format(str(response)), "DEBUG")
+
+        response = response.get('response')
+        if response:
+            image_status = response['taggedGolden']
+            # Idempotency: already tagged and tagging requested — nothing to do.
+            if image_status and image_status == tag_image_golden:
+                self.status = "success"
+                self.result['changed'] = False
+                self.msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center".format(image_name)
+                self.result['msg'] = self.msg
+                self.log(self.msg, "INFO")
+                return self
+
+            # Idempotency: already un-tagged and un-tagging requested — nothing to do.
+            if not image_status and image_status == tag_image_golden:
+                self.status = "success"
+                self.result['changed'] = False
+                self.msg = "SWIM Image '{0}' already un-tagged from Golden image in Cisco Catalyst Center".format(image_name)
+                self.result['msg'] = self.msg
+                self.log(self.msg, "INFO")
+                return self
+
+        if tag_image_golden:
+            image_params = dict(
+                imageId=self.have.get("tagging_image_id"),
+                siteId=self.have.get("site_id"),
+                deviceFamilyIdentifier=self.have.get("device_family_identifier"),
+                deviceRole=tagging_details.get("device_role", "ALL").upper()
+            )
+            self.log("Parameters for tagging the image as golden: {0}".format(str(image_params)), "INFO")
+
+            response = self.dnac._exec(
+                family="software_image_management_swim",
+                function='tag_as_golden_image',
+                op_modifies=True,
+                params=image_params
+            )
+            self.log("Received API response from 'tag_as_golden_image': {0}".format(str(response)), "DEBUG")
+
+        else:
+            self.log("Parameters for un-tagging the image as golden: {0}".format(str(image_params)), "INFO")
+
+            response = self.dnac._exec(
+                family="software_image_management_swim",
+                function='remove_golden_tag_for_image',
+                op_modifies=True,
+                params=image_params
+            )
+            self.log("Received API response from 'remove_golden_tag_for_image': {0}".format(str(response)), "DEBUG")
+
+        if not response:
+            self.status = "failed"
+            self.msg = "Did not get the response of API so cannot check the Golden tagging status of image - {0}".format(image_name)
+            self.log(self.msg, "ERROR")
+            self.result['response'] = self.msg
+            return self
+
+        task_details = {}
+        task_id = response.get("response").get("taskId")
+
+        # Poll the tagging task until it succeeds or errors.
+        # NOTE(review): no timeout bound on this loop; relies on get_task_details pacing.
+        while True:
+            task_details = self.get_task_details(task_id)
+
+            if not task_details.get("isError") and 'successful' in task_details.get("progress"):
+                self.status = "success"
+                self.result['changed'] = True
+                self.msg = task_details.get("progress")
+                self.result['msg'] = self.msg
+                self.result['response'] = self.msg
+                self.log(self.msg, "INFO")
+                break
+            elif task_details.get("isError"):
+                failure_reason = task_details.get("failureReason", "")
+                # Inherited golden tags cannot be removed; surface the API's reason verbatim.
+                if failure_reason and "An inheritted tag cannot be un-tagged" in failure_reason:
+                    self.status = "failed"
+                    self.result['changed'] = False
+                    self.msg = failure_reason
+                    self.result['msg'] = failure_reason
+                    self.log(self.msg, "ERROR")
+                    self.result['response'] = self.msg
+                    break
+                else:
+                    error_message = task_details.get("failureReason", "Error: while tagging/un-tagging the golden swim image.")
+                    self.status = "failed"
+                    self.msg = error_message
+                    self.result['msg'] = error_message
+                    self.log(self.msg, "ERROR")
+                    self.result['response'] = self.msg
+                    break
+
+        return self
+
def get_device_ip_from_id(self, device_id):
    """
    Look up a device's management IP address in Cisco Catalyst Center by device ID.

    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        device_id (str): The unique identifier of the device in Cisco Catalyst Center.
    Returns:
        str: The management IP address of the specified device.
    Raises:
        Exception: When the device lookup against Cisco Catalyst Center fails for any reason.
    Description:
        Queries the 'get_device_list' API (family 'devices') with the given ID and returns
        the 'managementIpAddress' field of the first matching record. Any failure is logged
        and re-raised as a generic Exception carrying the original error text.
    """

    try:
        api_response = self.dnac._exec(
            family="devices",
            function='get_device_list',
            params={"id": device_id}
        )
        self.log("Received API response from 'get_device_list': {0}".format(str(api_response)), "DEBUG")
        device_record = api_response.get('response')[0]
        return device_record.get("managementIpAddress")
    except Exception as e:
        error_message = "Error occurred while getting the response of device from Cisco Catalyst Center: {0}".format(str(e))
        self.log(error_message, "ERROR")
        raise Exception(error_message)
+
def check_swim_task_status(self, swim_task_dict, swim_task_name):
    """
    Check the status of the SWIM (Software Image Management) task for each device.
    Args:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        swim_task_dict (dict): A dictionary mapping device IP address to the respective task ID.
        swim_task_name (str): The name of the SWIM task being checked, either 'Distribution' or 'Activation'.
    Returns:
        tuple: A tuple containing two elements:
            - device_ips_list (list): Device IP addresses for which the SWIM task failed or timed out.
            - device_count (int): The count of devices for which the SWIM task succeeded.
    Description:
        For every device/task pair, polls the task details until the task completes
        successfully, reports an error, or the configured 'dnac_api_task_timeout' is
        exceeded. Successful devices increment the counter; failed or timed-out devices
        are collected in the returned list.
    """

    device_ips_list = []
    device_count = 0
    # BUG FIX / hoist: these module parameters are loop-invariant; the original
    # re-read 'dnac_api_task_timeout' on every poll iteration.
    max_timeout = self.params.get('dnac_api_task_timeout')
    poll_interval = self.params.get('dnac_task_poll_interval')

    for device_ip, task_id in swim_task_dict.items():
        start_time = time.time()

        while True:
            if (time.time() - start_time) >= max_timeout:
                # Give up on this device and move on; it is reported as failed.
                self.log("""Max timeout of {0} has reached for the task id '{1}' for the device '{2}' and unexpected
                         task status so moving out to next task id""".format(max_timeout, task_id, device_ip), "WARNING")
                device_ips_list.append(device_ip)
                break

            task_details = self.get_task_details(task_id)

            if not task_details.get("isError") and \
                    ("completed successfully" in task_details.get("progress")):
                self.result['changed'] = True
                self.status = "success"
                # BUG FIX: the original message was missing the closing quote after {1}.
                self.log("Image {0} successfully for the device '{1}'".format(swim_task_name, device_ip), "INFO")
                device_count += 1
                break

            if task_details.get("isError"):
                error_msg = "Image {0} gets failed for the device '{1}'".format(swim_task_name, device_ip)
                self.log(error_msg, "ERROR")
                self.result['response'] = task_details
                device_ips_list.append(device_ip)
                break
            time.sleep(poll_interval)

    return device_ips_list, device_count
+
def get_diff_distribution(self):
    """
    Get image distribution parameters from the playbook and trigger image distribution.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
    Returns:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
    Description:
        Retrieves image distribution parameters from the playbook's 'distribution_details' and
        triggers distribution of the specified software image, either to one device (when
        'distribution_device_id' was resolved into 'have') or to all eligible devices of the
        given site/family/role/series. Task progress is monitored and the 'result' dictionary
        updated; 'changed' is set to True on any successful distribution.
    """

    distribution_details = self.want.get("distribution_details")
    site_name = distribution_details.get("site_name")
    device_family = distribution_details.get("device_family_name")
    device_role = distribution_details.get("device_role", "ALL")
    device_series_name = distribution_details.get("device_series_name")
    device_uuid_list = self.get_device_uuids(site_name, device_family, device_role, device_series_name)
    image_id = self.have.get("distribution_image_id")
    # Reset per-run outcome flags; verify_diff_distributed() reads these afterwards.
    self.complete_successful_distribution = False
    self.partial_successful_distribution = False
    self.single_device_distribution = False

    if self.have.get("distribution_device_id"):
        # Single-device distribution: the target device was resolved earlier into 'have'.
        distribution_params = dict(
            payload=[dict(
                deviceUuid=self.have.get("distribution_device_id"),
                imageUuid=image_id
            )]
        )
        self.log("Distribution Params: {0}".format(str(distribution_params)), "INFO")

        response = self.dnac._exec(
            family="software_image_management_swim",
            function='trigger_software_image_distribution',
            op_modifies=True,
            params=distribution_params,
        )
        self.log("Received API response from 'trigger_software_image_distribution': {0}".format(str(response)), "DEBUG")

        if response:
            task_details = {}
            task_id = response.get("response").get("taskId")

            while True:
                task_details = self.get_task_details(task_id)

                if not task_details.get("isError") and \
                        ("completed successfully" in task_details.get("progress")):
                    self.result['changed'] = True
                    self.status = "success"
                    self.single_device_distribution = True
                    self.result['msg'] = "Image with Id {0} Distributed Successfully".format(image_id)
                    break

                if task_details.get("isError"):
                    self.status = "failed"
                    self.msg = "Image with Id {0} Distribution Failed".format(image_id)
                    self.log(self.msg, "ERROR")
                    self.result['response'] = task_details
                    break

            self.result['response'] = task_details if task_details else response

            return self

    if len(device_uuid_list) == 0:
        self.status = "success"
        self.msg = "The SWIM image distribution task could not proceed because no eligible devices were found"
        self.result['msg'] = self.msg
        self.log(self.msg, "WARNING")
        return self

    self.log("Device UUIDs involved in Image Distribution: {0}".format(str(device_uuid_list)), "INFO")
    distribution_task_dict = {}

    for device_uuid in device_uuid_list:
        device_management_ip = self.get_device_ip_from_id(device_uuid)
        distribution_params = dict(
            payload=[dict(
                deviceUuid=device_uuid,
                imageUuid=image_id
            )]
        )
        self.log("Distribution Params: {0}".format(str(distribution_params)), "INFO")
        response = self.dnac._exec(
            family="software_image_management_swim",
            function='trigger_software_image_distribution',
            op_modifies=True,
            params=distribution_params,
        )
        self.log("Received API response from 'trigger_software_image_distribution': {0}".format(str(response)), "DEBUG")

        if response:
            task_details = {}
            task_id = response.get("response").get("taskId")
            distribution_task_dict[device_management_ip] = task_id

    device_ips_list, device_distribution_count = self.check_swim_task_status(distribution_task_dict, 'Distribution')

    if device_distribution_count == 0:
        self.status = "failed"
        self.msg = "Image with Id {0} Distribution Failed for all devices".format(image_id)
    elif device_distribution_count == len(device_uuid_list):
        self.result['changed'] = True
        self.status = "success"
        self.complete_successful_distribution = True
        self.msg = "Image with Id {0} Distributed Successfully for all devices".format(image_id)
    else:
        self.result['changed'] = True
        self.status = "success"
        # BUG FIX: this flag was being reset to False on the partial-success path,
        # which made verify_diff_distributed() report the run as a failure; the
        # matching activation flow correctly sets its flag to True here.
        self.partial_successful_distribution = True
        # Also fixed the misspelling 'successfull' in the user-facing message.
        self.msg = "Image with Id '{0}' Distributed and partially successful".format(image_id)
        self.log("For device(s) {0} image Distribution gets failed".format(str(device_ips_list)), "CRITICAL")

    self.result['msg'] = self.msg
    self.log(self.msg, "INFO")

    return self
+
def get_diff_activation(self):
    """
    Trigger activation of a software image based on playbook parameters.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
    Returns:
        self (object): The same instance, with 'status', 'msg' and 'result' updated.
    Description:
        Reads 'activation_details' from the desired state (want) and triggers image
        activation either for a single device (when 'activation_device_id' is present
        in 'have') or for every eligible device under the given site. Task progress is
        polled and the outcome is reflected in the module result; 'changed' is set to
        True on success.
    """

    act_details = self.want.get("activation_details")
    site_name = act_details.get("site_name")
    device_family = act_details.get("device_family_name")
    device_role = act_details.get("device_role", "ALL")
    device_series_name = act_details.get("device_series_name")
    device_uuid_list = self.get_device_uuids(site_name, device_family, device_role, device_series_name)
    image_id = self.have.get("activation_image_id")
    self.complete_successful_activation = False
    self.partial_successful_activation = False
    self.single_device_activation = False

    def build_activation_params(device_uuid):
        # Assemble the activation request payload for a single device.
        # NOTE(review): 'scehdule_validate' (sic) is the key produced by the rest of
        # this module's desired-state handling — the spelling is preserved deliberately.
        payload = [{
            'activateLowerImageVersion': act_details.get("activate_lower_image_version"),
            'deviceUpgradeMode': act_details.get("device_upgrade_mode"),
            'distributeIfNeeded': act_details.get("distribute_if_needed"),
            'deviceUuid': device_uuid,
            'imageUuidList': [image_id],
        }]
        return {
            'schedule_validate': act_details.get("scehdule_validate"),
            'payload': payload,
        }

    if self.have.get("activation_device_id"):
        activation_params = build_activation_params(self.have.get("activation_device_id"))
        self.log("Activation Params: {0}".format(str(activation_params)), "INFO")

        response = self.dnac._exec(
            family="software_image_management_swim",
            function='trigger_software_image_activation',
            op_modifies=True,
            params=activation_params,
        )
        self.log("Received API response from 'trigger_software_image_activation': {0}".format(str(response)), "DEBUG")

        task_details = {}
        task_id = response.get("response").get("taskId")

        # NOTE(review): as in the original, this loop has no timeout of its own;
        # it exits only on success or on an error reported by the task.
        while True:
            task_details = self.get_task_details(task_id)
            succeeded = (not task_details.get("isError")
                         and "completed successfully" in task_details.get("progress"))

            if succeeded:
                self.result['changed'] = True
                self.result['msg'] = "Image Activated successfully"
                self.status = "success"
                self.single_device_activation = True
                break

            if task_details.get("isError"):
                self.msg = "Activation for Image with Id '{0}' gets failed".format(image_id)
                self.status = "failed"
                self.result['response'] = task_details
                self.log(self.msg, "ERROR")
                return self

        self.result['response'] = task_details if task_details else response

        return self

    if not device_uuid_list:
        self.status = "success"
        self.msg = "The SWIM image activation task could not proceed because no eligible devices were found."
        self.result['msg'] = self.msg
        self.log(self.msg, "WARNING")
        return self

    self.log("Device UUIDs involved in Image Activation: {0}".format(str(device_uuid_list)), "INFO")
    activation_task_dict = {}

    for device_uuid in device_uuid_list:
        device_management_ip = self.get_device_ip_from_id(device_uuid)
        activation_params = build_activation_params(device_uuid)
        self.log("Activation Params: {0}".format(str(activation_params)), "INFO")

        response = self.dnac._exec(
            family="software_image_management_swim",
            function='trigger_software_image_activation',
            op_modifies=True,
            params=activation_params,
        )
        self.log("Received API response from 'trigger_software_image_activation': {0}".format(str(response)), "DEBUG")

        if response:
            task_id = response.get("response").get("taskId")
            activation_task_dict[device_management_ip] = task_id

    device_ips_list, device_activation_count = self.check_swim_task_status(activation_task_dict, 'Activation')

    if device_activation_count == 0:
        self.status = "failed"
        self.msg = "Image with Id '{0}' activation failed for all devices".format(image_id)
    elif device_activation_count == len(device_uuid_list):
        self.result['changed'] = True
        self.status = "success"
        self.complete_successful_activation = True
        self.msg = "Image with Id '{0}' activated successfully for all devices".format(image_id)
    else:
        self.result['changed'] = True
        self.status = "success"
        self.partial_successful_activation = True
        self.msg = "Image with Id '{0}' activated and partially successful.".format(image_id)
        self.log("For Device(s) {0} Image activation gets Failed".format(str(device_ips_list)), "CRITICAL")

    self.result['msg'] = self.msg
    self.log(self.msg, "INFO")

    return self
+
def get_diff_merged(self, config):
    """
    Run tagging, distribution and activation as requested by the playbook config.
    Parameters:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        config (dict): The configuration dictionary containing tagging, distribution, and activation details.
    Returns:
        self: The current instance of the class with updated 'result' and 'have' attributes.
    Description:
        Each operation runs only when its section is present in the playbook config, in
        the fixed order tagging -> distribution -> activation, and each one validates its
        own return status before the next starts. If any operation succeeds, 'changed'
        ends up True in the module result.
    """

    operations = (
        ("tagging_details", self.get_diff_tagging),
        ("image_distribution_details", self.get_diff_distribution),
        ("image_activation_details", self.get_diff_activation),
    )
    for config_key, handler in operations:
        if config.get(config_key):
            handler().check_return_status()

    return self
+
def verify_diff_imported(self, import_type):
    """
    Verify that a software image was imported into Cisco Catalyst Center.
    Args:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        import_type (str): The type of import, either 'remote' or 'local'.
    Returns:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
    Description:
        Derives the image name from either the remote source URL or the local file path
        (depending on import_type) and checks whether that image is present in Catalyst
        Center. Presence sets status 'success' with an INFO message; absence logs a note
        that the import may not have succeeded.
    """

    if import_type == "remote":
        image_source = self.want.get("url_import_details").get("payload")[0].get("source_url")
    else:
        image_source = self.want.get("local_import_details").get("file_path")

    # Only the trailing path component identifies the image inside Catalyst Center.
    name = image_source.split('/')[-1]
    if self.is_image_exist(name):
        self.status = "success"
        self.msg = "The requested Image '{0}' imported in the Cisco Catalyst Center and Image presence has been verified.".format(name)
        self.log(self.msg, "INFO")
    else:
        self.log("""The playbook input for SWIM Image '{0}' does not align with the Cisco Catalyst Center, indicating that image
                 may not have imported successfully.""".format(name), "INFO")

    return self
+
def verify_diff_tagged(self):
    """
    Verify the Golden tagging status of a software image in Cisco Catalyst Center.
    Args:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
    Returns:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
    Description:
        Reads the desired tagging state from 'tagging_details' and queries
        'get_golden_tag_status_of_an_image' for the image/site/family/role resolved
        earlier into 'have'. If the reported 'taggedGolden' flag matches the desired
        state, a confirmation message is logged; otherwise a mismatch note is logged.
    """

    tagging_details = self.want.get("tagging_details")
    tag_image_golden = tagging_details.get("tagging")
    image_id = self.have.get("tagging_image_id")
    image_name = self.get_image_name_from_id(image_id)

    image_params = {
        'image_id': self.have.get("tagging_image_id"),
        'site_id': self.have.get("site_id"),
        'device_family_identifier': self.have.get("device_family_identifier"),
        'device_role': tagging_details.get("device_role", "ALL").upper(),
    }
    self.log("Parameters for checking the status of image: {0}".format(str(image_params)), "INFO")

    response = self.dnac._exec(
        family="software_image_management_swim",
        function='get_golden_tag_status_of_an_image',
        op_modifies=True,
        params=image_params
    )
    self.log("Received API response from 'get_golden_tag_status_of_an_image': {0}".format(str(response)), "DEBUG")

    api_data = response.get('response')
    if api_data:
        if api_data['taggedGolden'] == tag_image_golden:
            if tag_image_golden:
                self.msg = """The requested image '{0}' has been tagged as golden in the Cisco Catalyst Center and
                 its status has been successfully verified.""".format(image_name)
            else:
                self.msg = """The requested image '{0}' has been un-tagged as golden in the Cisco Catalyst Center and
                 image status has been verified.""".format(image_name)
            self.log(self.msg, "INFO")
        else:
            self.log("""Mismatch between the playbook input for tagging/un-tagging image as golden and the Cisco Catalyst Center indicates that
                 the tagging/un-tagging task was not executed successfully.""", "INFO")

    return self
+
def verify_diff_distributed(self):
    """
    Verify the distribution status of a software image in Cisco Catalyst Center.
    Args:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
    Returns:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
    Description:
        Verifies the distribution outcome recorded by get_diff_distribution(). When a
        specific distribution device ID is present in 'have', the single-device flag is
        checked; otherwise the complete/partial success flags for the site-wide run are
        consulted and an appropriate message is logged.
    """

    image_id = self.have.get("distribution_image_id")
    image_name = self.get_image_name_from_id(image_id)

    if self.have.get("distribution_device_id"):
        if self.single_device_distribution:
            self.msg = """The requested image '{0}', associated with the device ID '{1}', has been successfully distributed in the Cisco Catalyst Center
             and its status has been verified.""".format(image_name, self.have.get("distribution_device_id"))
            self.log(self.msg, "INFO")
        else:
            self.log("""Mismatch between the playbook input for distributing the image to the device with ID '{0}' and the actual state in the
             Cisco Catalyst Center suggests that the distribution task might not have been executed
             successfully.""".format(self.have.get("distribution_device_id")), "INFO")
    elif self.complete_successful_distribution:
        self.msg = """The requested image '{0}', with ID '{1}', has been successfully distributed to all devices within the specified
             site in the Cisco Catalyst Center.""".format(image_name, image_id)
        self.log(self.msg, "INFO")
    elif self.partial_successful_distribution:
        # BUG FIX: the message previously began with the stray characters 'T"'.
        self.msg = """The requested image '{0}', with ID '{1}', has been partially distributed across some devices in the Cisco Catalyst
             Center.""".format(image_name, image_id)
        self.log(self.msg, "INFO")
    else:
        self.msg = """The requested image '{0}', with ID '{1}', failed to be distributed across devices in the Cisco Catalyst
             Center.""".format(image_name, image_id)
        self.log(self.msg, "INFO")

    return self
+
def verify_diff_activated(self):
    """
    Verify the activation status of a software image in Cisco Catalyst Center.
    Args:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
    Returns:
        self (object): An instance of a class used for interacting with Cisco Catalyst Center.
    Description:
        Verifies the activation outcome recorded by get_diff_activation(). When a specific
        activation device ID is present in 'have', the single-device flag is checked;
        otherwise the complete/partial success flags for the site-wide run are consulted
        and an appropriate message is logged.
    """

    image_id = self.have.get("activation_image_id")
    image_name = self.get_image_name_from_id(image_id)

    if self.have.get("activation_device_id"):
        if self.single_device_activation:
            self.msg = """The requested image '{0}', associated with the device ID '{1}', has been successfully activated in the Cisco Catalyst
             Center and its status has been verified.""".format(image_name, self.have.get("activation_device_id"))
            self.log(self.msg, "INFO")
        else:
            self.log("""Mismatch between the playbook's input for activating the image '{0}' on the device with ID '{1}' and the actual state in
             the Cisco Catalyst Center suggests that the activation task might not have been executed
             successfully.""".format(image_name, self.have.get("activation_device_id")), "INFO")
    elif self.complete_successful_activation:
        self.msg = """The requested image '{0}', with ID '{1}', has been successfully activated on all devices within the specified site in the
             Cisco Catalyst Center.""".format(image_name, image_id)
        self.log(self.msg, "INFO")
    elif self.partial_successful_activation:
        # BUG FIX: the message previously began with a stray double quote ('""""The').
        self.msg = """The requested image '{0}', with ID '{1}', has been partially activated on some devices in the Cisco
             Catalyst Center.""".format(image_name, image_id)
        self.log(self.msg, "INFO")
    else:
        self.msg = """The activation of the requested image '{0}', with ID '{1}', failed on devices in the Cisco
             Catalyst Center.""".format(image_name, image_id)
        self.log(self.msg, "INFO")

    return self
+
def verify_diff_merged(self, config):
    """
    Verify the merged status (Importing/Tagging/Distributing/Activating) of the SWIM image in Cisco Catalyst Center.
    Args:
        - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
        - config (dict): The configuration details to be verified.
    Return:
        - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
    Description:
        Refreshes the current state (have), logs both current and desired state, then runs
        the verification step for each SWIM operation that was requested in the desired
        state, validating the return status after each one.
    """

    self.get_have()
    self.log("Current State (have): {0}".format(str(self.have)), "INFO")
    self.log("Desired State (want): {0}".format(str(self.want)), "INFO")

    # Import verification needs the import type as an argument, so it is handled first.
    import_type = self.want.get("import_type")
    if import_type:
        self.verify_diff_imported(import_type).check_return_status()

    verifiers = (
        ("tagging_details", self.verify_diff_tagged),
        ("distribution_details", self.verify_diff_distributed),
        ("activation_details", self.verify_diff_activated),
    )
    for want_key, verifier in verifiers:
        if self.want.get(want_key):
            verifier().check_return_status()

    return self
+
+
def main():
    """Main entry point for module execution.

    Builds the Ansible argument spec, instantiates the SWIM helper class and, for
    every validated playbook configuration entry, drives image import, tagging,
    distribution and activation, optionally verifying the applied state afterwards.
    """

    element_spec = {
        'dnac_host': {'required': True, 'type': 'str'},
        'dnac_port': {'type': 'str', 'default': '443'},
        'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
        'dnac_password': {'type': 'str', 'no_log': True},
        # BUG FIX: the default was the string 'True'; a genuine boolean avoids
        # relying on Ansible's implicit string-to-bool coercion.
        'dnac_verify': {'type': 'bool', 'default': True},
        'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
        'dnac_debug': {'type': 'bool', 'default': False},
        'dnac_log_level': {'type': 'str', 'default': 'WARNING'},
        'dnac_log_file_path': {'type': 'str', 'default': 'dnac.log'},
        'dnac_log_append': {'type': 'bool', 'default': True},
        'dnac_log': {'type': 'bool', 'default': False},
        'validate_response_schema': {'type': 'bool', 'default': True},
        'config_verify': {'type': 'bool', 'default': False},
        'dnac_api_task_timeout': {'type': 'int', 'default': 1200},
        'dnac_task_poll_interval': {'type': 'int', 'default': 2},
        'config': {'required': True, 'type': 'list', 'elements': 'dict'},
        'state': {'default': 'merged', 'choices': ['merged']},
    }

    module = AnsibleModule(argument_spec=element_spec,
                           supports_check_mode=False)

    ccc_swims = Swim(module)
    state = ccc_swims.params.get("state")

    if state not in ccc_swims.supported_states:
        ccc_swims.status = "invalid"
        ccc_swims.msg = "State {0} is invalid".format(state)
        ccc_swims.check_return_status()

    ccc_swims.validate_input().check_return_status()
    config_verify = ccc_swims.params.get("config_verify")

    for config in ccc_swims.validated_config:
        ccc_swims.reset_values()
        ccc_swims.get_want(config).check_return_status()
        ccc_swims.get_diff_import().check_return_status()
        ccc_swims.get_have().check_return_status()
        ccc_swims.get_diff_state_apply[state](config).check_return_status()
        if config_verify:
            ccc_swims.verify_diff_state_apply[state](config).check_return_status()

    module.exit_json(**ccc_swims.result)


if __name__ == '__main__':
    main()
diff --git a/ansible_collections/cisco/dnac/plugins/modules/tag_member.py b/ansible_collections/cisco/dnac/plugins/modules/tag_member.py
index 528caf301..017fd1c23 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/tag_member.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/tag_member.py
@@ -79,6 +79,7 @@ EXAMPLES = r"""
state: absent
id: string
memberId: string
+ memberType: string
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/template_intent.py b/ansible_collections/cisco/dnac/plugins/modules/template_intent.py
index 8a42f9271..c6e3042de 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/template_intent.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/template_intent.py
@@ -1,13 +1,13 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-
# Copyright (c) 2022, Cisco Systems
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+"""Ansible module to perform operations on project and templates in DNAC."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-__author__ = ("Madhan Sankaranarayanan, Rishita Chowdhary")
+__author__ = ['Madhan Sankaranarayanan, Rishita Chowdhary, Akash Bhaskaran, Muthu Rakesh']
DOCUMENTATION = r"""
---
@@ -18,12 +18,22 @@ description:
- API to create a template by project name and template name.
- API to update a template by template name and project name.
- API to delete a template by template name and project name.
+- API to export the projects for given projectNames.
+- API to export the templates for given templateIds.
+- API to manage operation create of the resource Configuration Template Import Project.
+- API to manage operation create of the resource Configuration Template Import Template.
version_added: '6.6.0'
extends_documentation_fragment:
- cisco.dnac.intent_params
author: Madhan Sankaranarayanan (@madhansansel)
Rishita Chowdhary (@rishitachowdhary)
+ Akash Bhaskaran (@akabhask)
+ Muthu Rakesh (@MUTHU-RAKESH-27)
options:
+ config_verify:
+ description: Set to True to verify the Cisco DNA Center after applying the playbook config.
+ type: bool
+ default: False
state:
description: The state of DNAC after module completion.
type: str
@@ -36,35 +46,270 @@ options:
elements: dict
required: true
suboptions:
- author:
- description: Author of template.
- type: str
- composite:
- description: Is it composite template.
- type: bool
- containingTemplates:
- description: Configuration Template Create's containingTemplates.
+ configuration_templates:
+ description: Create/Update/Delete template.
+ type: dict
suboptions:
+ author:
+ description: Author of template.
+ type: str
composite:
description: Is it composite template.
type: bool
- description:
- description: Description of template.
- type: str
- deviceTypes:
- description: Configuration Template Create's deviceTypes.
+ containing_templates:
+ description: Configuration Template Create's containingTemplates.
+ suboptions:
+ composite:
+ description: Is it composite template.
+ type: bool
+ description:
+ description: Description of template.
+ type: str
+ device_types:
+ description: deviceTypes on which templates would be applied.
+ type: list
+ elements: dict
+ suboptions:
+ product_family:
+ description: Device family.
+ type: str
+ product_series:
+ description: Device series.
+ type: str
+ product_type:
+ description: Device type.
+ type: str
+ id:
+ description: UUID of template.
+ type: str
+ language:
+ description: Template language
+ choices:
+ - JINJA
+ - VELOCITY
+ type: str
+ name:
+ description: Name of template.
+ type: str
+ project_name:
+ description: Name of the project under which templates are managed.
+ type: str
+ project_description:
+ description: Description of the project created.
+ type: str
+ rollback_template_params:
+ description: Params required for template rollback.
+ type: list
+ elements: dict
+ suboptions:
+ binding:
+ description: Bind to source.
+ type: str
+ custom_order:
+ description: CustomOrder of template param.
+ type: int
+ data_type:
+ description: Datatype of template param.
+ type: str
+ default_value:
+ description: Default value of template param.
+ type: str
+ description:
+ description: Description of template param.
+ type: str
+ display_name:
+ description: Display name of param.
+ type: str
+ group:
+ description: Group.
+ type: str
+ id:
+ description: UUID of template param.
+ type: str
+ instruction_text:
+ description: Instruction text for param.
+ type: str
+ key:
+ description: Key.
+ type: str
+ not_param:
+ description: Is it not a variable.
+ type: bool
+ order:
+ description: Order of template param.
+ type: int
+ param_array:
+ description: Is it an array.
+ type: bool
+ parameter_name:
+ description: Name of template param.
+ type: str
+ provider:
+ description: Provider.
+ type: str
+ range:
+ description: Configuration Template Create's range.
+ type: list
+ elements: dict
+ suboptions:
+ id:
+ description: UUID of range.
+ type: str
+ max_value:
+ description: Max value of range.
+ type: int
+ min_value:
+ description: Min value of range.
+ type: int
+ required:
+ description: Is param required.
+ type: bool
+ selection:
+ description: Configuration Template Create's selection.
+ suboptions:
+ default_selected_values:
+ description: Default selection values.
+ elements: str
+ type: list
+ id:
+ description: UUID of selection.
+ type: str
+ selection_type:
+ description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ type: str
+ selection_values:
+ description: Selection values.
+ type: dict
+ type: dict
+ tags:
+ description: Configuration Template Create's tags.
+ suboptions:
+ id:
+ description: UUID of tag.
+ type: str
+ name:
+ description: Name of tag.
+ type: str
+ type: list
+ elements: dict
+ template_content:
+ description: Template content.
+ type: str
+ template_params:
+ description: Configuration Template Create's templateParams.
+ elements: dict
+ suboptions:
+ binding:
+ description: Bind to source.
+ type: str
+ custom_order:
+ description: CustomOrder of template param.
+ type: int
+ data_type:
+ description: Datatype of template param.
+ type: str
+ default_value:
+ description: Default value of template param.
+ type: str
+ description:
+ description: Description of template param.
+ type: str
+ display_name:
+ description: Display name of param.
+ type: str
+ group:
+ description: Group.
+ type: str
+ id:
+ description: UUID of template param.
+ type: str
+ instruction_text:
+ description: Instruction text for param.
+ type: str
+ key:
+ description: Key.
+ type: str
+ not_param:
+ description: Is it not a variable.
+ type: bool
+ order:
+ description: Order of template param.
+ type: int
+ param_array:
+ description: Is it an array.
+ type: bool
+ parameter_name:
+ description: Name of template param.
+ type: str
+ provider:
+ description: Provider.
+ type: str
+ range:
+ description: Configuration Template Create's range.
+ suboptions:
+ id:
+ description: UUID of range.
+ type: str
+ max_value:
+ description: Max value of range.
+ type: int
+ min_value:
+ description: Min value of range.
+ type: int
+ type: list
+ elements: dict
+ required:
+ description: Is param required.
+ type: bool
+ selection:
+ description: Configuration Template Create's selection.
+ suboptions:
+ default_selected_values:
+ description: Default selection values.
+ elements: str
+ type: list
+ id:
+ description: UUID of selection.
+ type: str
+ selection_type:
+ description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ type: str
+ selection_values:
+ description: Selection values.
+ type: dict
+ type: dict
+ type: list
+ version:
+ description: Current version of template.
+ type: str
type: list
elements: dict
+ create_time:
+ description: Create time of template.
+ type: int
+ custom_params_order:
+ description: Custom Params Order.
+ type: bool
+ template_description:
+ description: Description of template.
+ type: str
+ device_types:
+ description: Configuration Template Create's deviceTypes. This field is mandatory to create a new template.
suboptions:
- productFamily:
+ product_family:
description: Device family.
type: str
- productSeries:
+ product_series:
description: Device series.
type: str
- productType:
+ product_type:
description: Device type.
type: str
+ type: list
+ elements: dict
+ failure_policy:
+ description: Define failure policy if template provisioning fails.
+ type: str
id:
description: UUID of template.
type: str
@@ -74,33 +319,49 @@ options:
- JINJA
- VELOCITY
type: str
- name:
- description: Name of template.
+ last_update_time:
+ description: Update time of template.
+ type: int
+ latest_version_time:
+ description: Latest versioned template time.
+ type: int
+ template_name:
+ description: Name of template. This field is mandatory to create a new template.
+ type: str
+ parent_template_id:
+ description: Parent templateID.
+ type: str
+ project_id:
+ description: Project UUID.
type: str
- projectName:
+ project_name:
description: Project name.
type: str
- rollbackTemplateParams:
+ project_description:
+ description: Project Description.
+ type: str
+ rollback_template_content:
+ description: Rollback template content.
+ type: str
+ rollback_template_params:
description: Configuration Template Create's rollbackTemplateParams.
- type: list
- elements: dict
suboptions:
binding:
description: Bind to source.
type: str
- customOrder:
+ custom_order:
description: CustomOrder of template param.
type: int
- dataType:
+ data_type:
description: Datatype of template param.
type: str
- defaultValue:
+ default_value:
description: Default value of template param.
type: str
description:
description: Description of template param.
type: str
- displayName:
+ display_name:
description: Display name of param.
type: str
group:
@@ -109,22 +370,22 @@ options:
id:
description: UUID of template param.
type: str
- instructionText:
+ instruction_text:
description: Instruction text for param.
type: str
key:
description: Key.
type: str
- notParam:
+ not_param:
description: Is it not a variable.
type: bool
order:
description: Order of template param.
type: int
- paramArray:
+ param_array:
description: Is it an array.
type: bool
- parameterName:
+ parameter_name:
description: Name of template param.
type: str
provider:
@@ -132,39 +393,50 @@ options:
type: str
range:
description: Configuration Template Create's range.
- type: list
- elements: dict
suboptions:
id:
description: UUID of range.
type: str
- maxValue:
+ max_value:
description: Max value of range.
type: int
- minValue:
+ min_value:
description: Min value of range.
type: int
+ type: list
+ elements: dict
required:
description: Is param required.
type: bool
selection:
description: Configuration Template Create's selection.
suboptions:
- defaultSelectedValues:
+ default_selected_values:
description: Default selection values.
elements: str
type: list
id:
description: UUID of selection.
type: str
- selectionType:
+ selection_type:
description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
type: str
- selectionValues:
+ selection_values:
description: Selection values.
type: dict
type: dict
- tags:
+ type: list
+ elements: dict
+ software_type:
+ description: Applicable device software type. This field is mandatory to create a new template.
+ type: str
+ software_variant:
+ description: Applicable device software variant.
+ type: str
+ software_version:
+ description: Applicable device software version.
+ type: str
+ template_tag:
description: Configuration Template Create's tags.
suboptions:
id:
@@ -175,29 +447,28 @@ options:
type: str
type: list
elements: dict
- templateContent:
+ template_content:
description: Template content.
type: str
- templateParams:
+ template_params:
description: Configuration Template Create's templateParams.
- elements: dict
suboptions:
binding:
description: Bind to source.
type: str
- customOrder:
+ custom_order:
description: CustomOrder of template param.
type: int
- dataType:
+ data_type:
description: Datatype of template param.
type: str
- defaultValue:
+ default_value:
description: Default value of template param.
type: str
description:
description: Description of template param.
type: str
- displayName:
+ display_name:
description: Display name of param.
type: str
group:
@@ -206,22 +477,22 @@ options:
id:
description: UUID of template param.
type: str
- instructionText:
+ instruction_text:
description: Instruction text for param.
type: str
key:
description: Key.
type: str
- notParam:
+ not_param:
description: Is it not a variable.
type: bool
order:
description: Order of template param.
type: int
- paramArray:
+ param_array:
description: Is it an array.
type: bool
- parameterName:
+ parameter_name:
description: Name of template param.
type: str
provider:
@@ -233,10 +504,10 @@ options:
id:
description: UUID of range.
type: str
- maxValue:
+ max_value:
description: Max value of range.
type: int
- minValue:
+ min_value:
description: Min value of range.
type: int
type: list
@@ -247,294 +518,590 @@ options:
selection:
description: Configuration Template Create's selection.
suboptions:
- defaultSelectedValues:
+ default_selected_values:
description: Default selection values.
elements: str
type: list
id:
description: UUID of selection.
type: str
- selectionType:
+ selection_type:
description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
type: str
- selectionValues:
+ selection_values:
description: Selection values.
type: dict
type: dict
type: list
- version:
- description: Current version of template.
- type: str
- type: list
- elements: dict
- createTime:
- description: Create time of template.
- type: int
- customParamsOrder:
- description: Custom Params Order.
- type: bool
- template_description:
- description: Description of template.
- type: str
- deviceTypes:
- description: Configuration Template Create's deviceTypes.
- suboptions:
- productFamily:
- description: Device family.
- type: str
- productSeries:
- description: Device series.
- type: str
- productType:
- description: Device type.
- type: str
- type: list
- elements: dict
- failurePolicy:
- description: Define failure policy if template provisioning fails.
- type: str
- language:
- description: Template language
- choices:
- - JINJA
- - VELOCITY
- type: str
- lastUpdateTime:
- description: Update time of template.
- type: int
- latestVersionTime:
- description: Latest versioned template time.
- type: int
- templateName:
- description: Name of template.
- type: str
- parentTemplateId:
- description: Parent templateID.
- type: str
- projectId:
- description: Project UUID.
- type: str
- projectName:
- description: Project name.
- type: str
- rollbackTemplateContent:
- description: Rollback template content.
- type: str
- rollbackTemplateParams:
- description: Configuration Template Create's rollbackTemplateParams.
- suboptions:
- binding:
- description: Bind to source.
- type: str
- customOrder:
- description: CustomOrder of template param.
- type: int
- dataType:
- description: Datatype of template param.
- type: str
- defaultValue:
- description: Default value of template param.
- type: str
- description:
- description: Description of template param.
- type: str
- displayName:
- description: Display name of param.
- type: str
- group:
- description: Group.
- type: str
- id:
- description: UUID of template param.
- type: str
- instructionText:
- description: Instruction text for param.
- type: str
- key:
- description: Key.
- type: str
- notParam:
- description: Is it not a variable.
- type: bool
- order:
- description: Order of template param.
- type: int
- paramArray:
- description: Is it an array.
- type: bool
- parameterName:
- description: Name of template param.
- type: str
- provider:
- description: Provider.
- type: str
- range:
- description: Configuration Template Create's range.
- suboptions:
- id:
- description: UUID of range.
- type: str
- maxValue:
- description: Max value of range.
- type: int
- minValue:
- description: Min value of range.
- type: int
- type: list
elements: dict
- required:
- description: Is param required.
- type: bool
- selection:
- description: Configuration Template Create's selection.
+ validation_errors:
+ description: Configuration Template Create's validationErrors.
suboptions:
- defaultSelectedValues:
- description: Default selection values.
- elements: str
+ rollback_template_errors:
+ description: Validation or design conflicts errors of rollback template.
+ elements: dict
type: list
- id:
- description: UUID of selection.
+ template_errors:
+ description: Validation or design conflicts errors.
+ elements: dict
+ type: list
+ template_id:
+ description: UUID of template.
type: str
- selectionType:
- description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ template_version:
+ description: Current version of template.
type: str
- selectionValues:
- description: Selection values.
- type: dict
type: dict
- type: list
- elements: dict
- softwareType:
- description: Applicable device software type.
- type: str
- softwareVariant:
- description: Applicable device software variant.
- type: str
- softwareVersion:
- description: Applicable device software version.
- type: str
- template_tag:
- description: Configuration Template Create's tags.
- suboptions:
- id:
- description: UUID of tag.
+ version:
+ description: Current version of template.
type: str
- name:
- description: Name of tag.
+ version_description:
+ description: Template version comments.
type: str
- type: list
- elements: dict
- templateContent:
- description: Template content.
- type: str
- templateParams:
- description: Configuration Template Create's templateParams.
+ export:
+ description: Export the project/template details.
+ type: dict
suboptions:
- binding:
- description: Bind to source.
- type: str
- customOrder:
- description: CustomOrder of template param.
- type: int
- dataType:
- description: Datatype of template param.
- type: str
- defaultValue:
- description: Default value of template param.
- type: str
- description:
- description: Description of template param.
- type: str
- displayName:
- description: Display name of param.
- type: str
- group:
- description: Group.
- type: str
- id:
- description: UUID of template param.
- type: str
- instructionText:
- description: Instruction text for param.
- type: str
- key:
- description: Key.
- type: str
- notParam:
- description: Is it not a variable.
- type: bool
- order:
- description: Order of template param.
- type: int
- paramArray:
- description: Is it an array.
- type: bool
- parameterName:
- description: Name of template param.
- type: str
- provider:
- description: Provider.
- type: str
- range:
- description: Configuration Template Create's range.
- suboptions:
- id:
- description: UUID of range.
- type: str
- maxValue:
- description: Max value of range.
- type: int
- minValue:
- description: Min value of range.
- type: int
+ project:
+ description: Export the project.
+ type: list
+ elements: str
+ template:
+ description: Export the template.
type: list
elements: dict
- required:
- description: Is param required.
- type: bool
- selection:
- description: Configuration Template Create's selection.
suboptions:
- defaultSelectedValues:
- description: Default selection values.
- elements: str
- type: list
- id:
- description: UUID of selection.
+ project_name:
+ description: Name of the project under the template available.
type: str
- selectionType:
- description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ template_name:
+ description: Name of the template to be exported.
type: str
- selectionValues:
- description: Selection values.
- type: dict
- type: dict
- type: list
- elements: dict
- validationErrors:
- description: Configuration Template Create's validationErrors.
- suboptions:
- rollbackTemplateErrors:
- description: Validation or design conflicts errors of rollback template.
- elements: dict
- type: list
- templateErrors:
- description: Validation or design conflicts errors.
- elements: dict
- type: list
- templateId:
- description: UUID of template.
- type: str
- templateVersion:
- description: Current version of template.
- type: str
+ import:
+ description: Import the project/template details.
type: dict
- version:
- description: Current version of template.
- type: str
- versionDescription:
- description: Template version comments.
- type: str
+ suboptions:
+ project:
+ description: Import the project details.
+ type: dict
+ suboptions:
+ do_version:
+ description: DoVersion query parameter. If this flag is true, creates a new
+ version of the template with the imported contents, if the template already
+ exists. If false and the template already exists, then the operation
+ fails with a 'Template already exists' error.
+ type: bool
+ template:
+ description: Import the template details.
+ type: dict
+ suboptions:
+ do_version:
+ description: DoVersion query parameter. If this flag is true, creates a new
+ version of the template with the imported contents, if the template already
+ exists. If false and the template already exists, then the operation
+ fails with a 'Template already exists' error.
+ type: bool
+ payload:
+ description: Configuration Template Import Template's payload.
+ elements: dict
+ suboptions:
+ author:
+ description: Author of template.
+ type: str
+ composite:
+ description: Is it composite template.
+ type: bool
+ containing_templates:
+ description: Configuration Template Import Template's containingTemplates.
+ elements: dict
+ suboptions:
+ composite:
+ description: Is it composite template.
+ type: bool
+ description:
+ description: Description of template.
+ type: str
+ device_types:
+ description: Configuration Template Import Template's deviceTypes.
+ elements: dict
+ suboptions:
+ product_family:
+ description: Device family.
+ type: str
+ product_series:
+ description: Device series.
+ type: str
+ product_type:
+ description: Device type.
+ type: str
+ type: list
+ id:
+ description: UUID of template.
+ type: str
+ language:
+ description: Template language (JINJA or VELOCITY).
+ type: str
+ name:
+ description: Name of template.
+ type: str
+ project_name:
+ description: Project name.
+ type: str
+ rollback_template_params:
+ description: Configuration Template Import Template's rollbackTemplateParams.
+ elements: dict
+ suboptions:
+ binding:
+ description: Bind to source.
+ type: str
+ custom_order:
+ description: CustomOrder of template param.
+ type: int
+ data_type:
+ description: Datatype of template param.
+ type: str
+ default_value:
+ description: Default value of template param.
+ type: str
+ description:
+ description: Description of template param.
+ type: str
+ display_name:
+ description: Display name of param.
+ type: str
+ group:
+ description: Group.
+ type: str
+ id:
+ description: UUID of template param.
+ type: str
+ instruction_text:
+ description: Instruction text for param.
+ type: str
+ key:
+ description: Key.
+ type: str
+ not_param:
+ description: Is it not a variable.
+ type: bool
+ order:
+ description: Order of template param.
+ type: int
+ param_array:
+ description: Is it an array.
+ type: bool
+ parameter_name:
+ description: Name of template param.
+ type: str
+ provider:
+ description: Provider.
+ type: str
+ range:
+ description: Configuration Template Import Template's range.
+ elements: dict
+ suboptions:
+ id:
+ description: UUID of range.
+ type: str
+ max_value:
+ description: Max value of range.
+ type: int
+ min_value:
+ description: Min value of range.
+ type: int
+ type: list
+ required:
+ description: Is param required.
+ type: bool
+ selection:
+ description: Configuration Template Import Template's selection.
+ suboptions:
+ default_selected_values:
+ description: Default selection values.
+ elements: str
+ type: list
+ id:
+ description: UUID of selection.
+ type: str
+ selection_type:
+ description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ type: str
+ selection_values:
+ description: Selection values.
+ type: dict
+ type: dict
+ type: list
+ tags:
+ description: Configuration Template Import Template's tags.
+ elements: dict
+ suboptions:
+ id:
+ description: UUID of tag.
+ type: str
+ name:
+ description: Name of tag.
+ type: str
+ type: list
+ template_content:
+ description: Template content.
+ type: str
+ template_params:
+ description: Configuration Template Import Template's templateParams.
+ elements: dict
+ suboptions:
+ binding:
+ description: Bind to source.
+ type: str
+ custom_order:
+ description: CustomOrder of template param.
+ type: int
+ data_type:
+ description: Datatype of template param.
+ type: str
+ default_value:
+ description: Default value of template param.
+ type: str
+ description:
+ description: Description of template param.
+ type: str
+ display_name:
+ description: Display name of param.
+ type: str
+ group:
+ description: Group.
+ type: str
+ id:
+ description: UUID of template param.
+ type: str
+ instruction_text:
+ description: Instruction text for param.
+ type: str
+ key:
+ description: Key.
+ type: str
+ not_param:
+ description: Is it not a variable.
+ type: bool
+ order:
+ description: Order of template param.
+ type: int
+ param_array:
+ description: Is it an array.
+ type: bool
+ parameter_name:
+ description: Name of template param.
+ type: str
+ provider:
+ description: Provider.
+ type: str
+ range:
+ description: Configuration Template Import Template's range.
+ elements: dict
+ suboptions:
+ id:
+ description: UUID of range.
+ type: str
+ max_value:
+ description: Max value of range.
+ type: int
+ min_value:
+ description: Min value of range.
+ type: int
+ type: list
+ required:
+ description: Is param required.
+ type: bool
+ selection:
+ description: Configuration Template Import Template's selection.
+ suboptions:
+ default_selected_values:
+ description: Default selection values.
+ elements: str
+ type: list
+ id:
+ description: UUID of selection.
+ type: str
+ selection_type:
+ description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ type: str
+ selection_values:
+ description: Selection values.
+ type: dict
+ type: dict
+ type: list
+ version:
+ description: Current version of template.
+ type: str
+ type: list
+ create_time:
+ description: Create time of template.
+ type: int
+ custom_params_order:
+ description: Custom Params Order.
+ type: bool
+ description:
+ description: Description of template.
+ type: str
+ device_types:
+ description: Configuration Template Import Template's deviceTypes.
+ elements: dict
+ suboptions:
+ product_family:
+ description: Device family.
+ type: str
+ product_series:
+ description: Device series.
+ type: str
+ product_type:
+ description: Device type.
+ type: str
+ type: list
+ failure_policy:
+ description: Define failure policy if template provisioning fails.
+ type: str
+ id:
+ description: UUID of template.
+ type: str
+ language:
+ description: Template language (JINJA or VELOCITY).
+ type: str
+ last_update_time:
+ description: Update time of template.
+ type: int
+ latest_version_time:
+ description: Latest versioned template time.
+ type: int
+ name:
+ description: Name of template.
+ type: str
+ parent_template_id:
+ description: Parent templateID.
+ type: str
+ project_id:
+ description: Project UUID.
+ type: str
+ project_name:
+ description: Project name.
+ type: str
+ rollback_template_content:
+ description: Rollback template content.
+ type: str
+ rollback_template_params:
+ description: Configuration Template Import Template's rollbackTemplateParams.
+ elements: dict
+ suboptions:
+ binding:
+ description: Bind to source.
+ type: str
+ custom_order:
+ description: CustomOrder of template param.
+ type: int
+ data_type:
+ description: Datatype of template param.
+ type: str
+ default_value:
+ description: Default value of template param.
+ type: str
+ description:
+ description: Description of template param.
+ type: str
+ display_name:
+ description: Display name of param.
+ type: str
+ group:
+ description: Group.
+ type: str
+ id:
+ description: UUID of template param.
+ type: str
+ instruction_text:
+ description: Instruction text for param.
+ type: str
+ key:
+ description: Key.
+ type: str
+ not_param:
+ description: Is it not a variable.
+ type: bool
+ order:
+ description: Order of template param.
+ type: int
+ param_array:
+ description: Is it an array.
+ type: bool
+ parameter_name:
+ description: Name of template param.
+ type: str
+ provider:
+ description: Provider.
+ type: str
+ range:
+ description: Configuration Template Import Template's range.
+ elements: dict
+ suboptions:
+ id:
+ description: UUID of range.
+ type: str
+ max_value:
+ description: Max value of range.
+ type: int
+ min_value:
+ description: Min value of range.
+ type: int
+ type: list
+ required:
+ description: Is param required.
+ type: bool
+ selection:
+ description: Configuration Template Import Template's selection.
+ suboptions:
+ default_selected_values:
+ description: Default selection values.
+ elements: str
+ type: list
+ id:
+ description: UUID of selection.
+ type: str
+ selection_type:
+ description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ type: str
+ selection_values:
+ description: Selection values.
+ type: dict
+ type: dict
+ type: list
+ software_type:
+ description: Applicable device software type.
+ type: str
+ software_variant:
+ description: Applicable device software variant.
+ type: str
+ software_version:
+ description: Applicable device software version.
+ type: str
+ tags:
+ description: Configuration Template Import Template's tags.
+ elements: dict
+ suboptions:
+ id:
+ description: UUID of tag.
+ type: str
+ name:
+ description: Name of tag.
+ type: str
+ type: list
+ template_content:
+ description: Template content.
+ type: str
+ template_params:
+ description: Configuration Template Import Template's templateParams.
+ elements: dict
+ suboptions:
+ binding:
+ description: Bind to source.
+ type: str
+ custom_order:
+ description: CustomOrder of template param.
+ type: int
+ data_type:
+ description: Datatype of template param.
+ type: str
+ default_value:
+ description: Default value of template param.
+ type: str
+ description:
+ description: Description of template param.
+ type: str
+ display_name:
+ description: Display name of param.
+ type: str
+ group:
+ description: Group.
+ type: str
+ id:
+ description: UUID of template param.
+ type: str
+ instruction_text:
+ description: Instruction text for param.
+ type: str
+ key:
+ description: Key.
+ type: str
+ not_param:
+ description: Is it not a variable.
+ type: bool
+ order:
+ description: Order of template param.
+ type: int
+ param_array:
+ description: Is it an array.
+ type: bool
+ parameter_name:
+ description: Name of template param.
+ type: str
+ provider:
+ description: Provider.
+ type: str
+ range:
+ description: Configuration Template Import Template's range.
+ elements: dict
+ suboptions:
+ id:
+ description: UUID of range.
+ type: str
+ max_value:
+ description: Max value of range.
+ type: int
+ min_value:
+ description: Min value of range.
+ type: int
+ type: list
+ required:
+ description: Is param required.
+ type: bool
+ selection:
+ description: Configuration Template Import Template's selection.
+ suboptions:
+ default_selected_values:
+ description: Default selection values.
+ elements: str
+ type: list
+ id:
+ description: UUID of selection.
+ type: str
+ selection_type:
+ description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ type: str
+ selection_values:
+ description: Selection values.
+ type: dict
+ type: dict
+ type: list
+ validation_errors:
+ description: Configuration Template Import Template's validationErrors.
+ suboptions:
+ rollback_template_errors:
+ description: Validation or design conflicts errors of rollback template.
+ type: dict
+ template_errors:
+ description: Validation or design conflicts errors.
+ type: dict
+ template_id:
+ description: UUID of template.
+ type: str
+ template_version:
+ description: Current version of template.
+ type: str
+ type: dict
+ version:
+ description: Current version of template.
+ type: str
+ type: list
+ project_name:
+ description: ProjectName path parameter. Project name to create template under the
+ project.
+ type: str
+
requirements:
- dnacentersdk == 2.4.5
- python >= 3.5
@@ -543,16 +1110,24 @@ notes:
configuration_templates.ConfigurationTemplates.create_template,
configuration_templates.ConfigurationTemplates.deletes_the_template,
configuration_templates.ConfigurationTemplates.update_template,
+ configuration_templates.ConfigurationTemplates.export_projects,
+ configuration_templates.ConfigurationTemplates.export_templates,
+ configuration_templates.ConfigurationTemplates.imports_the_projects_provided,
+ configuration_templates.ConfigurationTemplates.imports_the_templates_provided,
- Paths used are
post /dna/intent/api/v1/template-programmer/project/{projectId}/template,
delete /dna/intent/api/v1/template-programmer/template/{templateId},
put /dna/intent/api/v1/template-programmer/template,
+ post /dna/intent/api/v1/template-programmer/project/name/exportprojects,
+ post /dna/intent/api/v1/template-programmer/template/exporttemplates,
+ post /dna/intent/api/v1/template-programmer/project/importprojects,
+ post /dna/intent/api/v1/template-programmer/project/name/{projectName}/template/importtemplates,
"""
EXAMPLES = r"""
-- name: Create a new template
+- name: Create a new template, export and import the project and template.
cisco.dnac.template_intent:
dnac_host: "{{dnac_host}}"
dnac_username: "{{dnac_username}}"
@@ -562,47 +1137,96 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
state: merged
+ config_verify: True
config:
+ - configuration_templates:
author: string
composite: true
- createTime: 0
- customParamsOrder: true
+ create_time: 0
+ custom_params_order: true
description: string
- deviceTypes:
- - productFamily: string
- productSeries: string
- productType: string
- failurePolicy: string
+ device_types:
+ - product_family: string
+ product_series: string
+ product_type: string
+ failure_policy: string
id: string
language: string
- lastUpdateTime: 0
- latestVersionTime: 0
+ last_update_time: 0
+ latest_version_time: 0
name: string
- parentTemplateId: string
- projectId: string
- projectName: string
- rollbackTemplateContent: string
- softwareType: string
- softwareVariant: string
- softwareVersion: string
+ parent_template_id: string
+ project_id: string
+ project_name: string
+ project_description: string
+ rollback_template_content: string
+ software_type: string
+ software_variant: string
+ software_version: string
tags:
- id: string
name: string
- templateContent: string
- validationErrors:
- rollbackTemplateErrors:
+ template_content: string
+ validation_errors:
+ rollback_template_errors:
- {}
- templateErrors:
+ template_errors:
- {}
- templateId: string
- templateVersion: string
+ template_id: string
+ template_version: string
version: string
+ export:
+ project:
+ - string
+ template:
+ - project_name : string
+ template_name: string
+ import:
+ project:
+ do_version: true
+ template:
+ do_version: true
+ payload:
+ - author: string
+ composite: true
+ containing_templates:
+ - composite: true
+ description: string
+ device_types:
+ - product_family: string
+ product_series: string
+ product_type: string
+ id: string
+ language: string
+ name: string
+ project_name: string
+ rollback_template_params:
+ - binding: string
+ custom_order: 0
+ data_type: string
+ default_value: string
+ description: string
+ display_name: string
+ group: string
+ id: string
+ instruction_text: string
+ key: string
+ not_param: true
+ order: 0
+ param_array: true
+ parameter_name: string
+ provider: string
+ range:
+ - id: string
+ project_name: string
+
"""
RETURN = r"""
-#Case_1: Successful creation/updation/deletion of template
+# Case_1: Successful creation/updation/deletion of template/project
response_1:
description: A dictionary with versioning details of the template as returned by the DNAC Python SDK
returned: always
@@ -625,7 +1249,7 @@ response_1:
"msg": String
}
-#Case_2: Error while deleting a template or when given project is not found
+# Case_2: Error while deleting a template or when given project is not found
response_2:
description: A list with the response returned by the Cisco DNAC Python SDK
returned: always
@@ -636,7 +1260,7 @@ response_2:
"msg": String
}
-#Case_3: Given template already exists and requires no udpate
+# Case_3: Given template already exists and requires no update
response_3:
description: A dictionary with the existing template details as returned by the Cisco DNAC Python SDK
returned: always
@@ -646,223 +1270,926 @@ response_3:
"response": {},
"msg": String
}
+
+# Case_4: Given template list that needs to be exported
+response_4:
+ description: Details of the templates in the list as returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {},
+ "msg": String
+ }
+
+# Case_5: Given project list that needs to be exported
+response_5:
+ description: Details of the projects in the list as returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {},
+ "msg": String
+ }
+
"""
import copy
+from ansible.module_utils.basic import AnsibleModule
from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
- DNACSDK,
+ DnacBase,
validate_list_of_dicts,
- log,
get_dict_result,
dnac_compare_equality,
)
-from ansible.module_utils.basic import AnsibleModule
-class DnacTemplate:
+class DnacTemplate(DnacBase):
+ """Class containing member attributes for template intent module"""
def __init__(self, module):
- self.module = module
- self.params = module.params
- self.config = copy.deepcopy(module.params.get("config"))
- self.have_create = {}
- self.want_create = {}
- self.validated = []
- dnac_params = self.get_dnac_params(self.params)
- log(str(dnac_params))
- self.dnac = DNACSDK(params=dnac_params)
- self.log = dnac_params.get("dnac_log")
-
- self.result = dict(changed=False, diff=[], response=[], warnings=[])
-
- def get_state(self):
- return self.params.get("state")
+ super().__init__(module)
+ self.have_project = {}
+ self.have_template = {}
+ self.supported_states = ["merged", "deleted"]
+ self.accepted_languages = ["JINJA", "VELOCITY"]
+ self.export_template = []
+ self.result['response'].append({})
def validate_input(self):
- temp_spec = dict(
- tags=dict(type="list"),
- author=dict(type="str"),
- composite=dict(type="bool"),
- containingTemplates=dict(type="list"),
- createTime=dict(type="int"),
- customParamsOrder=dict(type="bool"),
- description=dict(type="str"),
- deviceTypes=dict(type="list", elements='dict'),
- failurePolicy=dict(type="str"),
- id=dict(type="str"),
- language=dict(type="str"),
- lastUpdateTime=dict(type="int"),
- latestVersionTime=dict(type="int"),
- name=dict(type="str"),
- parentTemplateId=dict(type="str"),
- projectId=dict(type="str"),
- projectName=dict(required=True, type="str"),
- rollbackTemplateContent=dict(type="str"),
- rollbackTemplateParams=dict(type="list"),
- softwareType=dict(type="str"),
- softwareVariant=dict(type="str"),
- softwareVersion=dict(type="str"),
- templateContent=dict(type="str"),
- templateParams=dict(type="list"),
- templateName=dict(required=True, type='str'),
- validationErrors=dict(type="dict"),
- version=dict(type="str"),
- versionDescription=dict(type='str'),
+ """
+ Validate the fields provided in the playbook.
+ Checks the configuration provided in the playbook against a predefined specification
+ to ensure it adheres to the expected structure and data types.
+ Parameters:
+ self: The instance of the class containing the 'config' attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call 'validate_input' on it.
+ If the validation succeeds, 'self.status' will be 'success' and 'self.validated_config'
+ will contain the validated configuration. If it fails, 'self.status' will be 'failed',
+ 'self.msg' will describe the validation issues.
+
+ """
+
+ if not self.config:
+            self.msg = "config not available in playbook for validation"
+ self.status = "success"
+ return self
+
+ temp_spec = {
+ "configuration_templates": {
+ 'type': 'dict',
+ 'tags': {'type': 'list'},
+ 'author': {'type': 'str'},
+ 'composite': {'type': 'bool'},
+ 'containing_templates': {'type': 'list'},
+ 'create_time': {'type': 'int'},
+ 'custom_params_order': {'type': 'bool'},
+ 'description': {'type': 'str'},
+ 'device_types': {
+ 'type': 'list',
+ 'elements': 'dict',
+ 'product_family': {'type': 'str'},
+ 'product_series': {'type': 'str'},
+ 'product_type': {'type': 'str'},
+ },
+ 'failure_policy': {'type': 'str'},
+ 'id': {'type': 'str'},
+ 'language': {'type': 'str'},
+ 'last_update_time': {'type': 'int'},
+ 'latest_version_time': {'type': 'int'},
+ 'name': {'type': 'str'},
+ 'parent_template_id': {'type': 'str'},
+ 'project_id': {'type': 'str'},
+ 'project_name': {'type': 'str'},
+ 'project_description': {'type': 'str'},
+ 'rollback_template_content': {'type': 'str'},
+ 'rollback_template_params': {'type': 'list'},
+ 'software_type': {'type': 'str'},
+ 'software_variant': {'type': 'str'},
+ 'software_version': {'type': 'str'},
+ 'template_content': {'type': 'str'},
+ 'template_params': {'type': 'list'},
+ 'template_name': {'type': 'str'},
+ 'validation_errors': {'type': 'dict'},
+ 'version': {'type': 'str'},
+ 'version_description': {'type': 'str'}
+ },
+ 'export': {
+ 'type': 'dict',
+ 'project': {'type': 'list', 'elements': 'str'},
+ 'template': {
+ 'type': 'list',
+ 'elements': 'dict',
+ 'project_name': {'type': 'str'},
+ 'template_name': {'type': 'str'}
+ }
+ },
+ 'import': {
+ 'type': 'dict',
+ 'project': {
+ 'type': 'dict',
+ 'do_version': {'type': 'str', 'default': 'False'},
+ },
+ 'template': {
+ 'type': 'dict',
+ 'do_version': {'type': 'str', 'default': 'False'},
+ 'payload': {
+ 'type': 'list',
+ 'elements': 'dict',
+ 'tags': {'type': 'list'},
+ 'author': {'type': 'str'},
+ 'composite': {'type': 'bool'},
+ 'containing_templates': {'type': 'list'},
+ 'create_time': {'type': 'int'},
+ 'custom_params_order': {'type': 'bool'},
+ 'description': {'type': 'str'},
+ 'device_types': {
+ 'type': 'list',
+ 'elements': 'dict',
+ 'product_family': {'type': 'str'},
+ 'product_series': {'type': 'str'},
+ 'product_type': {'type': 'str'},
+ },
+ 'failure_policy': {'type': 'str'},
+ 'id': {'type': 'str'},
+ 'language': {'type': 'str'},
+ 'last_update_time': {'type': 'int'},
+ 'latest_version_time': {'type': 'int'},
+ 'name': {'type': 'str'},
+ 'parent_template_id': {'type': 'str'},
+ 'project_id': {'type': 'str'},
+ 'project_name': {'type': 'str'},
+ 'project_description': {'type': 'str'},
+ 'rollback_template_content': {'type': 'str'},
+ 'rollback_template_params': {'type': 'list'},
+ 'software_type': {'type': 'str'},
+ 'software_variant': {'type': 'str'},
+ 'software_version': {'type': 'str'},
+ 'template_content': {'type': 'str'},
+ 'template_params': {'type': 'list'},
+ 'template_name': {'type': 'str'},
+ 'validation_errors': {'type': 'dict'},
+ 'version': {'type': 'str'}
+ }
+ }
+ }
+ }
+ # Validate template params
+ self.config = self.camel_to_snake_case(self.config)
+ valid_temp, invalid_params = validate_list_of_dicts(
+ self.config, temp_spec
)
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format(
+ "\n".join(invalid_params))
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_temp
+ self.log("Successfully validated playbook config params: {0}".format(valid_temp), "INFO")
+ self.msg = "Successfully validated input"
+ self.status = "success"
+ return self
+
+ def get_project_params(self, params):
+ """
+ Store project parameters from the playbook for template processing in DNAC.
+
+ Parameters:
+ params (dict) - Playbook details containing Project information.
+
+ Returns:
+ project_params (dict) - Organized Project parameters.
+ """
+
+ project_params = {"name": params.get("project_name"),
+ "description": params.get("project_description")
+ }
+ return project_params
+
+ def get_tags(self, _tags):
+ """
+ Store tags from the playbook for template processing in DNAC.
+ Check using check_return_status()
+
+ Parameters:
+ tags (dict) - Tags details containing Template information.
+
+ Returns:
+ tags (dict) - Organized tags parameters.
+ """
+
+ if _tags is None:
+ return None
+
+ tags = []
+ i = 0
+ for item in _tags:
+ tags.append({})
+ id = item.get("id")
+ if id is not None:
+ tags[i].update({"id": id})
+
+ name = item.get("name")
+ if name is not None:
+ tags[i].update({"name": name})
+ else:
+ self.msg = "name is mandatory in tags in location " + str(i)
+ self.status = "failed"
+ return self.check_return_status()
+
+ return tags
+
+ def get_device_types(self, device_types):
+ """
+ Store device types parameters from the playbook for template processing in DNAC.
+ Check using check_return_status()
+
+ Parameters:
+ device_types (dict) - Device types details containing Template information.
+
+ Returns:
+ deviceTypes (dict) - Organized device types parameters.
+ """
+
+ if device_types is None:
+ return None
+
+ deviceTypes = []
+ i = 0
+ for item in device_types:
+ deviceTypes.append({})
+ product_family = item.get("product_family")
+ if product_family is not None:
+ deviceTypes[i].update({"productFamily": product_family})
+ else:
+ self.msg = "product_family is mandatory for deviceTypes"
+ self.status = "failed"
+ return self.check_return_status()
+
+ product_series = item.get("product_series")
+ if product_series is not None:
+ deviceTypes[i].update({"productSeries": product_series})
+ product_type = item.get("product_type")
+ if product_type is not None:
+ deviceTypes[i].update({"productType": product_type})
+ i = i + 1
+
+ return deviceTypes
+
+ def get_validation_errors(self, validation_errors):
+ """
+ Store template parameters from the playbook for template processing in DNAC.
+
+ Parameters:
+ validation_errors (dict) - Playbook details containing validation errors information.
+
+ Returns:
+ validationErrors (dict) - Organized validation errors parameters.
+ """
+
+ if validation_errors is None:
+ return None
+
+ validationErrors = {}
+ rollback_template_errors = validation_errors.get("rollback_template_errors")
+ if rollback_template_errors is not None:
+ validationErrors.update({
+ "rollbackTemplateErrors": rollback_template_errors
+ })
+
+ template_errors = validation_errors.get("template_errors")
+ if template_errors is not None:
+ validationErrors.update({
+ "templateErrors": template_errors
+ })
+
+ template_id = validation_errors.get("template_id")
+ if template_id is not None:
+ validationErrors.update({
+ "templateId": template_id
+ })
+
+ template_version = validation_errors.get("template_version")
+ if template_version is not None:
+ validationErrors.update({
+ "templateVersion": template_version
+ })
+
+ return validationErrors
+
+ def get_template_info(self, template_params):
+ """
+ Store template params from the playbook for template processing in DNAC.
+ Check using check_return_status()
+
+ Parameters:
+ template_params (dict) - Playbook details containing template params information.
+
+ Returns:
+ templateParams (dict) - Organized template params parameters.
+ """
+
+ if template_params is None:
+ return None
+
+ templateParams = []
+ i = 0
+ self.log("Template params details: {0}".format(template_params), "DEBUG")
+ for item in template_params:
+ self.log("Template params items: {0}".format(item), "DEBUG")
+ templateParams.append({})
+ binding = item.get("binding")
+ if binding is not None:
+ templateParams[i].update({"binding": binding})
+
+ custom_order = item.get("custom_order")
+ if custom_order is not None:
+ templateParams[i].update({"customOrder": custom_order})
+
+ default_value = item.get("default_value")
+ if default_value is not None:
+ templateParams[i].update({"defaultValue": default_value})
+
+ description = item.get("description")
+ if description is not None:
+ templateParams[i].update({"description": description})
+
+ display_name = item.get("display_name")
+ if display_name is not None:
+ templateParams[i].update({"displayName": display_name})
+
+ group = item.get("group")
+ if group is not None:
+ templateParams[i].update({"group": group})
+
+ id = item.get("id")
+ if id is not None:
+ templateParams[i].update({"id": id})
+
+ instruction_text = item.get("instruction_text")
+ if instruction_text is not None:
+ templateParams[i].update({"instructionText": instruction_text})
+
+ key = item.get("key")
+ if key is not None:
+ templateParams[i].update({"key": key})
+
+ not_param = item.get("not_param")
+ if not_param is not None:
+ templateParams[i].update({"notParam": not_param})
+
+ order = item.get("order")
+ if order is not None:
+ templateParams[i].update({"order": order})
+
+ param_array = item.get("param_array")
+ if param_array is not None:
+ templateParams[i].update({"paramArray": param_array})
+
+ provider = item.get("provider")
+ if provider is not None:
+ templateParams[i].update({"provider": provider})
+
+ parameter_name = item.get("parameter_name")
+ if parameter_name is not None:
+ templateParams[i].update({"parameterName": parameter_name})
+ else:
+ self.msg = "parameter_name is mandatory for the template_params."
+ self.status = "failed"
+ return self.check_return_status()
+
+ data_type = item.get("data_type")
+ datatypes = ["STRING", "INTEGER", "IPADDRESS", "MACADDRESS", "SECTIONDIVIDER"]
+ if data_type is not None:
+ templateParams[i].update({"dataType": data_type})
+ else:
+ self.msg = "dataType is mandatory for the template_params."
+ self.status = "failed"
+ return self.check_return_status()
+ if data_type not in datatypes:
+ self.msg = "data_type under template_params should be in " + str(datatypes)
+ self.status = "failed"
+ return self.check_return_status()
+
+ required = item.get("required")
+ if required is not None:
+ templateParams[i].update({"required": required})
+
+ range = item.get("range")
+ self.log("Template params range list: {0}".format(range), "DEBUG")
+ if range is not None:
+ templateParams[i].update({"range": []})
+ _range = templateParams[i].get("range")
+ self.log("Template params range: {0}".format(_range), "DEBUG")
+ j = 0
+ for value in range:
+ _range.append({})
+ id = value.get("id")
+ if id is not None:
+ _range[j].update({"id": id})
+ max_value = value.get("max_value")
+ if max_value is not None:
+ _range[j].update({"maxValue": max_value})
+ else:
+ self.msg = "max_value is mandatory for range under template_params"
+ self.status = "failed"
+ return self.check_return_status()
+ min_value = value.get("min_value")
+ if min_value is not None:
+                        _range[j].update({"minValue": min_value})
+ else:
+ self.msg = "min_value is mandatory for range under template_params"
+ self.status = "failed"
+ return self.check_return_status()
+ j = j + 1
+
+ self.log("Template params details: {0}".format(templateParams), "DEBUG")
+ selection = item.get("selection")
+ self.log("Template params selection: {0}".format(selection), "DEBUG")
+ if selection is not None:
+ templateParams[i].update({"selection": {}})
+ _selection = templateParams[i].get("selection")
+ id = selection.get("id")
+ if id is not None:
+ _selection.update({"id": id})
+ default_selected_values = selection.get("default_selected_values")
+ if default_selected_values is not None:
+ _selection.update({"defaultSelectedValues": default_selected_values})
+ selection_values = selection.get("selection_values")
+ if selection_values is not None:
+ _selection.update({"selectionValues": selection_values})
+ selection_type = selection.get("selection_type")
+ if selection_type is not None:
+ _selection.update({"selectionType": selection_type})
+ i = i + 1
+
+ return templateParams
+
+ def get_containing_templates(self, containing_templates):
+ """
+        Store containing templates from the playbook for template processing in DNAC.
+ Check using check_return_status()
+
+ Parameters:
+            containing_templates (dict) - Containing templates details
+ containing Template information.
+
+ Returns:
+ containingTemplates (dict) - Organized containing templates parameters.
+ """
+
+ if containing_templates is None:
+ return None
+
+ containingTemplates = []
+ i = 0
+ for item in containing_templates:
+ containingTemplates.append({})
+ _tags = item.get("tags")
+ if _tags is not None:
+ containingTemplates[i].update({"tags": self.get_tags(_tags)})
+
+ composite = item.get("composite")
+ if composite is not None:
+ containingTemplates[i].update({"composite": composite})
+
+ description = item.get("description")
+ if description is not None:
+ containingTemplates[i].update({"description": description})
+
+ device_types = item.get("device_types")
+ if device_types is not None:
+ containingTemplates[i].update({
+ "deviceTypes": self.get_device_types(device_types)
+ })
+
+ id = item.get("id")
+ if id is not None:
+ containingTemplates[i].update({"id": id})
+
+ name = item.get("name")
+ if name is None:
+ self.msg = "name is mandatory under containing templates"
+ self.status = "failed"
+ return self.check_return_status()
+
+ containingTemplates[i].update({"name": name})
+
+ language = item.get("language")
+ if language is None:
+ self.msg = "language is mandatory under containing templates"
+ self.status = "failed"
+ return self.check_return_status()
+
+ language_list = ["JINJA", "VELOCITY"]
+ if language not in language_list:
+ self.msg = "language under containing templates should be in " + str(language_list)
+ self.status = "failed"
+ return self.check_return_status()
+
+ containingTemplates[i].update({"language": language})
+
+ project_name = item.get("project_name")
+ if project_name is not None:
+ containingTemplates[i].update({"projectName": project_name})
+ else:
+ self.msg = "project_name is mandatory under containing templates"
+ self.status = "failed"
+ return self.check_return_status()
- if self.config:
- msg = None
- # Validate template params
- valid_temp, invalid_params = validate_list_of_dicts(
- self.config, temp_spec
- )
+ rollback_template_params = item.get("rollback_template_params")
+ if rollback_template_params is not None:
+ containingTemplates[i].update({
+ "rollbackTemplateParams": self.get_template_info(rollback_template_params)
+ })
- if invalid_params:
- msg = "Invalid parameters in playbook: {0}".format(
- "\n".join(invalid_params)
- )
- self.module.fail_json(msg=msg)
+ template_content = item.get("template_content")
+ if template_content is not None:
+ containingTemplates[i].update({"templateContent": template_content})
- self.validated = valid_temp
+ template_params = item.get("template_params")
+ if template_params is not None:
+ containingTemplates[i].update({
+ "templateParams": self.get_template_info(template_params)
+ })
- if self.log:
- log(str(valid_temp))
- log(str(self.validated))
+ version = item.get("version")
+ if version is not None:
+ containingTemplates[i].update({"version": version})
- if self.params.get("state") == "merged":
- for temp in self.validated:
- if not temp.get("language") or not temp.get("deviceTypes") \
- or not temp.get("softwareType"):
- msg = "missing required arguments: language or deviceTypes or softwareType"
- self.module.fail_json(msg=msg)
- if not (temp.get("language").lower() == "velocity" or
- temp.get("language").lower() == "jinja"):
- msg = "Invalid parameters in playbook: {0} : Invalid choice provided".format(
- "".join(temp.get("language")))
- self.module.fail_json(msg=msg)
-
- def get_dnac_params(self, params):
- dnac_params = dict(
- dnac_host=params.get("dnac_host"),
- dnac_port=params.get("dnac_port"),
- dnac_username=params.get("dnac_username"),
- dnac_password=params.get("dnac_password"),
- dnac_verify=params.get("dnac_verify"),
- dnac_debug=params.get("dnac_debug"),
- dnac_log=params.get("dnac_log")
- )
- return dnac_params
+ return containingTemplates
def get_template_params(self, params):
- temp_params = dict(
- tags=params.get("template_tag"),
- author=params.get("author"),
- composite=params.get("composite"),
- containingTemplates=params.get("containingTemplates"),
- createTime=params.get("createTime"),
- customParamsOrder=params.get("customParamsOrder"),
- description=params.get("template_description"),
- deviceTypes=params.get("deviceTypes"),
- failurePolicy=params.get("failurePolicy"),
- id=params.get("templateId"),
- language=params.get("language").upper(),
- lastUpdateTime=params.get("lastUpdateTime"),
- latestVersionTime=params.get("latestVersionTime"),
- name=params.get("templateName"),
- parentTemplateId=params.get("parentTemplateId"),
- projectId=params.get("projectId"),
- projectName=params.get("projectName"),
- rollbackTemplateContent=params.get("rollbackTemplateContent"),
- rollbackTemplateParams=params.get("rollbackTemplateParams"),
- softwareType=params.get("softwareType"),
- softwareVariant=params.get("softwareVariant"),
- softwareVersion=params.get("softwareVersion"),
- templateContent=params.get("templateContent"),
- templateParams=params.get("templateParams"),
- validationErrors=params.get("validationErrors"),
- version=params.get("version"),
- project_id=params.get("projectId"),
- )
+ """
+ Store template parameters from the playbook for template processing in DNAC.
+
+ Parameters:
+ params (dict) - Playbook details containing Template information.
+
+ Returns:
+ temp_params (dict) - Organized template parameters.
+ """
+
+ self.log("Template params playbook details: {0}".format(params), "DEBUG")
+ temp_params = {
+ "tags": self.get_tags(params.get("template_tag")),
+ "author": params.get("author"),
+ "composite": params.get("composite"),
+ "containingTemplates":
+ self.get_containing_templates(params.get("containing_templates")),
+ "createTime": params.get("create_time"),
+ "customParamsOrder": params.get("custom_params_order"),
+ "description": params.get("template_description"),
+ "deviceTypes":
+ self.get_device_types(params.get("device_types")),
+ "failurePolicy": params.get("failure_policy"),
+ "id": params.get("id"),
+ "language": params.get("language").upper(),
+ "lastUpdateTime": params.get("last_update_time"),
+ "latestVersionTime": params.get("latest_version_time"),
+ "name": params.get("template_name"),
+ "parentTemplateId": params.get("parent_template_id"),
+ "projectId": params.get("project_id"),
+ "projectName": params.get("project_name"),
+ "rollbackTemplateContent": params.get("rollback_template_content"),
+ "rollbackTemplateParams":
+ self.get_template_info(params.get("rollback_template_params")),
+ "softwareType": params.get("software_type"),
+ "softwareVariant": params.get("software_variant"),
+ "softwareVersion": params.get("software_version"),
+ "templateContent": params.get("template_content"),
+ "templateParams":
+ self.get_template_info(params.get("template_params")),
+ "validationErrors":
+ self.get_validation_errors(params.get("validation_errors")),
+ "version": params.get("version"),
+ "project_id": params.get("project_id")
+ }
+ self.log("Formatted template params details: {0}".format(temp_params), "DEBUG")
+ copy_temp_params = copy.deepcopy(temp_params)
+ for item in copy_temp_params:
+ if temp_params[item] is None:
+ del temp_params[item]
+ self.log("Formatted template params details: {0}".format(temp_params), "DEBUG")
return temp_params
- def get_template(self):
- result = None
+ def get_template(self, config):
+ """
+        Get the template needed for update or creation.
- for temp in self.validated:
- items = self.dnac._exec(
- family="configuration_templates",
- function="get_template_details",
- params={"template_id": temp.get("templateId")}
- )
+ Parameters:
+ config (dict) - Playbook details containing Template information.
- if items:
- result = items
+ Returns:
+ result (dict) - Template details for the given template ID.
+ """
- if self.log:
- log(str(items))
+ result = None
+ items = self.dnac_apply['exec'](
+ family="configuration_templates",
+ function="get_template_details",
+ params={"template_id": config.get("templateId")}
+ )
+ if items:
+ result = items
+ self.log("Received API response from 'get_template_details': {0}".format(items), "DEBUG")
self.result['response'] = items
return result
- def get_have(self):
- prev_template = None
- template_exists = False
- have_create = {}
-
- # Get available templates. Filter templates based on provided projectName
- for temp in self.validated:
- template_list = self.dnac._exec(
- family="configuration_templates",
- function='gets_the_templates_available',
- params={"project_names": temp.get("projectName")},
- )
- # API execution error returns a dict
- if template_list and isinstance(template_list, list):
- template_details = get_dict_result(template_list, 'name', temp.get("templateName"))
-
- if template_details:
- temp["templateId"] = template_details.get("templateId")
- have_create["templateId"] = template_details.get("templateId")
- prev_template = self.get_template()
-
- if self.log:
- log(str(prev_template))
+ def get_have_project(self, config):
+ """
+ Get the current project related information from DNAC.
+
+ Parameters:
+ config (dict) - Playbook details containing Project information.
+
+ Returns:
+ template_available (list) - Current project information.
+ """
+
+ have_project = {}
+ given_projectName = config.get("configuration_templates").get("project_name")
+ template_available = None
+
+ # Check if project exists.
+ project_details = self.get_project_details(given_projectName)
+ # DNAC returns project details even if the substring matches.
+ # Hence check the projectName retrieved from DNAC.
+ if not (project_details and isinstance(project_details, list)):
+ self.log("Project: {0} not found, need to create new project in DNAC"
+ .format(given_projectName), "INFO")
+ return None
+
+ fetched_projectName = project_details[0].get('name')
+ if fetched_projectName != given_projectName:
+ self.log("Project {0} provided is not exact match in DNAC DB"
+ .format(given_projectName), "INFO")
+ return None
+
+ template_available = project_details[0].get('templates')
+ have_project["project_found"] = True
+ have_project["id"] = project_details[0].get("id")
+ have_project["isDeletable"] = project_details[0].get("isDeletable")
+
+ self.have_project = have_project
+ return template_available
+
+ def get_have_template(self, config, template_available):
+ """
+ Get the current template related information from DNAC.
+
+ Parameters:
+ config (dict) - Playbook details containing Template information.
+ template_available (list) - Current project information.
+
+ Returns:
+ self
+ """
+
+ projectName = config.get("configuration_templates").get("project_name")
+ templateName = config.get("configuration_templates").get("template_name")
+ template = None
+ have_template = {}
+
+ have_template["isCommitPending"] = False
+ have_template["template_found"] = False
+
+ template_details = get_dict_result(template_available,
+ "name",
+ templateName)
+ # Check if specified template in playbook is available
+ if not template_details:
+ self.log("Template {0} not found in project {1}"
+ .format(templateName, projectName), "INFO")
+ self.msg = "Template : {0} missing, new template to be created".format(templateName)
+ self.status = "success"
+ return self
+
+ config["templateId"] = template_details.get("id")
+ have_template["id"] = template_details.get("id")
+ # Get available templates which are committed under the project
+ template_list = self.dnac_apply['exec'](
+ family="configuration_templates",
+ function="gets_the_templates_available",
+ params={"projectNames": config.get("projectName")},
+ )
+ have_template["isCommitPending"] = True
+        # This check will fail if the specified template exists but is not committed in DNAC
+ if template_list and isinstance(template_list, list):
+ template_info = get_dict_result(template_list,
+ "name",
+ templateName)
+ if template_info:
+ template = self.get_template(config)
+ have_template["template"] = template
+ have_template["isCommitPending"] = False
+ have_template["template_found"] = template is not None \
+ and isinstance(template, dict)
+ self.log("Template {0} is found and template "
+ "details are :{1}".format(templateName, str(template)), "INFO")
+
+ # There are committed templates in the project but the
+ # one specified in the playbook may not be committed
+ self.log("Commit pending for template name {0}"
+ " is {1}".format(templateName, have_template.get('isCommitPending')), "INFO")
+
+ self.have_template = have_template
+ self.msg = "Successfully collected all template parameters from dnac for comparison"
+ self.status = "success"
+ return self
+
+ def get_have(self, config):
+ """
+ Get the current project and template details from DNAC.
+
+ Parameters:
+ config (dict) - Playbook details containing Project/Template information.
+
+ Returns:
+ self
+ """
+ configuration_templates = config.get("configuration_templates")
+ if configuration_templates:
+ if not configuration_templates.get("project_name"):
+ self.msg = "Mandatory Parameter project_name not available"
+ self.status = "failed"
+ return self
+ template_available = self.get_have_project(config)
+ if template_available:
+ self.get_have_template(config, template_available)
+
+ self.msg = "Successfully collected all project and template \
+ parameters from dnac for comparison"
+ self.status = "success"
+ return self
+
+ def get_project_details(self, projectName):
+ """
+ Get the details of specific project name provided.
+
+ Parameters:
+ projectName (str) - Project Name
+
+ Returns:
+ items (dict) - Project details with given project name.
+ """
+
+ items = self.dnac_apply['exec'](
+ family="configuration_templates",
+ function='get_projects',
+ op_modifies=True,
+ params={"name": projectName},
+ )
+ return items
- template_exists = prev_template is not None and isinstance(prev_template, dict)
- else:
- self.module.fail_json(msg="Project Not Found", response=[])
+ def get_want(self, config):
+ """
+ Get all the template and project related information from playbook
+ that is needed to be created in DNAC.
- have_create['template'] = prev_template
- have_create['template_found'] = template_exists
- self.have_create = have_create
+ Parameters:
+ config (dict) - Playbook details.
- def get_want(self):
- want_create = {}
+ Returns:
+ self
+ """
- for temp in self.validated:
- template_params = self.get_template_params(temp)
- version_comments = temp.get("versionDescription")
+ want = {}
+ configuration_templates = config.get("configuration_templates")
+ self.log("Playbook details: {0}".format(config), "INFO")
+ if configuration_templates:
+ template_params = self.get_template_params(configuration_templates)
+ project_params = self.get_project_params(configuration_templates)
+ version_comments = configuration_templates.get("version_description")
- if self.params.get("state") == "merged" and \
- not self.have_create.get("template_found"):
+ if self.params.get("state") == "merged":
+ self.update_mandatory_parameters(template_params)
+
+ want["template_params"] = template_params
+ want["project_params"] = project_params
+ want["comments"] = version_comments
+
+ self.want = want
+ self.msg = "Successfully collected all parameters from playbook " + \
+ "for comparison"
+ self.status = "success"
+ return self
+
+ def create_project_or_template(self, is_create_project=False):
+ """
+ Call DNAC API to create project or template based on the input provided.
+
+ Parameters:
+ is_create_project (bool) - Default value is False.
+
+ Returns:
+ creation_id (str) - Project Id.
+ created (str) - True if Project created, else False.
+ """
+
+ creation_id = None
+ created = False
+ self.log("Desired State (want): {0}".format(self.want), "INFO")
+ template_params = self.want.get("template_params")
+ project_params = self.want.get("project_params")
+
+ if is_create_project:
+ params_key = project_params
+ name = "project: {0}".format(project_params.get('name'))
+ validation_string = "Successfully created project"
+ creation_value = "create_project"
+ else:
+ params_key = template_params
+ name = "template: {0}".format(template_params.get('name'))
+ validation_string = "Successfully created template"
+ creation_value = "create_template"
+
+ response = self.dnac_apply['exec'](
+ family="configuration_templates",
+ function=creation_value,
+ op_modifies=True,
+ params=params_key,
+ )
+ if not isinstance(response, dict):
+ self.log("Response of '{0}' is not in dictionary format."
+ .format(creation_value), "CRITICAL")
+ return creation_id, created
+
+ task_id = response.get("response").get("taskId")
+ if not task_id:
+ self.log("Task id {0} not found for '{1}'.".format(task_id, creation_value), "CRITICAL")
+ return creation_id, created
+
+ while not created:
+ task_details = self.get_task_details(task_id)
+ if not task_details:
+ self.log("Failed to get task details of '{0}' for taskid: {1}"
+ .format(creation_value, task_id), "CRITICAL")
+ return creation_id, created
+
+ self.log("Task details for {0}: {1}".format(creation_value, task_details), "DEBUG")
+ if task_details.get("isError"):
+ self.log("Error occurred for '{0}' with taskid: {1}"
+ .format(creation_value, task_id), "ERROR")
+ return creation_id, created
+
+ if validation_string not in task_details.get("progress"):
+ self.log("'{0}' progress set to {1} for taskid: {2}"
+ .format(creation_value, task_details.get('progress'), task_id), "DEBUG")
+ continue
+
+ task_details_data = task_details.get("data")
+ value = self.check_string_dictionary(task_details_data)
+ if value is None:
+ creation_id = task_details.get("data")
+ else:
+ creation_id = value.get("templateId")
+ if not creation_id:
+ self.log("Export data is not found for '{0}' with taskid : {1}"
+ .format(creation_value, task_id), "DEBUG")
+ continue
+
+ created = True
+ if is_create_project:
# ProjectId is required for creating a new template.
# Store it with other template parameters.
- items = self.dnac._exec(
- family="configuration_templates",
- function='get_projects',
- params={"name": temp.get("projectName")},
- )
- template_params["projectId"] = items[0].get("id")
- template_params["project_id"] = items[0].get("id")
-
- want_create["template_params"] = template_params
- want_create["comments"] = version_comments
+ template_params["projectId"] = creation_id
+ template_params["project_id"] = creation_id
- self.want_create = want_create
+ self.log("New {0} created with id {1}".format(name, creation_id), "DEBUG")
+ return creation_id, created
def requires_update(self):
- current_obj = self.have_create.get("template")
- requested_obj = self.want_create.get("template_params")
+ """
+ Check if the template config given requires update.
+
+ Parameters:
+ self - Current object.
+
+ Returns:
+ bool - True if any parameter specified in obj_params differs between
+ current_obj and requested_obj, indicating that an update is required.
+ False if all specified parameters are equal.
+ """
+
+ if self.have_template.get("isCommitPending"):
+ self.log("Template '{0}' is in saved state and needs to be updated and committed."
+ .format(self.have_template.get("template").get("name")), "DEBUG")
+ return True
+
+ current_obj = self.have_template.get("template")
+ requested_obj = self.want.get("template_params")
+ self.log("Current State (have): {0}".format(current_obj), "INFO")
+ self.log("Desired State (want): {0}".format(requested_obj), "INFO")
obj_params = [
("tags", "tags", ""),
("author", "author", ""),
@@ -896,163 +2223,573 @@ class DnacTemplate:
requested_obj.get(ansible_param))
for (dnac_param, ansible_param, default) in obj_params)
- def get_task_details(self, id):
- result = None
- response = self.dnac._exec(
- family="task",
- function='get_task_by_id',
- params={"task_id": id},
- )
+ def update_mandatory_parameters(self, template_params):
+ """
+ Update parameters which are mandatory for creating a template.
+
+ Parameters:
+ template_params (dict) - Template information.
+
+ Returns:
+ None
+ """
+
+ # Mandate fields required for creating a new template.
+ # Store it with other template parameters.
+ template_params["projectId"] = self.have_project.get("id")
+ template_params["project_id"] = self.have_project.get("id")
+ # Update language,deviceTypes and softwareType if not provided for existing template.
+ if not template_params.get("language"):
+ template_params["language"] = self.have_template.get('template') \
+ .get('language')
+ if not template_params.get("deviceTypes"):
+ template_params["deviceTypes"] = self.have_template.get('template') \
+ .get('deviceTypes')
+ if not template_params.get("softwareType"):
+ template_params["softwareType"] = self.have_template.get('template') \
+ .get('softwareType')
+
+ def validate_input_merge(self, template_exists):
+ """
+ Validate input after getting all the parameters from DNAC.
+        If mandatory fields such as deviceTypes, softwareType and language
+        are already present in DNAC for an existing template, they need not
+        be provided in the playbook; for a new template creation, an error
+        is thrown if these fields are not provided.
+
+ Parameters:
+ template_exists (bool) - True if template exists, else False.
+
+ Returns:
+ None
+ """
+
+ template_params = self.want.get("template_params")
+ language = template_params.get("language").upper()
+ if language:
+ if language not in self.accepted_languages:
+ self.msg = "Invalid value language {0} ." \
+ "Accepted language values are {1}" \
+ .format(self.accepted_languages, language)
+ self.status = "failed"
+ return self
+ else:
+ template_params["language"] = "JINJA"
- if self.log:
- log(str(response))
+ if not template_exists:
+ if not template_params.get("deviceTypes") \
+ or not template_params.get("softwareType"):
+ self.msg = "DeviceTypes and SoftwareType are required arguments to create Templates"
+ self.status = "failed"
+ return self
- if isinstance(response, dict):
- result = response.get("response")
+ self.msg = "Input validated for merging"
+ self.status = "success"
+ return self
- return result
+ def get_export_template_values(self, export_values):
+ """
+ Get the export template values from the details provided by the playbook.
- def get_diff_merge(self):
- template_id = None
- template_ceated = False
- template_updated = False
- template_exists = self.have_create.get("template_found")
+ Parameters:
+            export_values (list) - Details (project_name, template_name) of the templates to be exported.
- if template_exists:
- if self.requires_update():
- response = self.dnac._exec(
+ Returns:
+ self
+ """
+
+ template_details = self.dnac._exec(
+ family="configuration_templates",
+ function='get_projects_details'
+ )
+ for values in export_values:
+ project_name = values.get("project_name")
+ self.log("Project name for export template: {0}".format(project_name), "DEBUG")
+ template_details = template_details.get("response")
+ self.log("Template details: {0}".format(template_details), "DEBUG")
+ all_template_details = get_dict_result(template_details,
+ "name",
+ project_name)
+ self.log("Template details under the project name {0}: {1}"
+ .format(project_name, all_template_details), "DEBUG")
+ all_template_details = all_template_details.get("templates")
+ self.log("Template details under the project name {0}: {1}"
+ .format(project_name, all_template_details), "DEBUG")
+ template_name = values.get("template_name")
+ template_detail = get_dict_result(all_template_details,
+ "name",
+ template_name)
+ self.log("Template details with template name {0}: {1}"
+ .format(template_name, template_detail), "DEBUG")
+ if template_detail is None:
+ self.msg = "Invalid project_name and template_name in export"
+ self.status = "failed"
+ return self
+ self.export_template.append(template_detail.get("id"))
+
+ self.msg = "Successfully collected the export template IDs"
+ self.status = "success"
+ return self
+
+ def update_configuration_templates(self, config):
+ """
+ Update/Create templates and projects in DNAC with fields provided in DNAC.
+
+ Parameters:
+ config (dict) - Playbook details containing template information.
+
+ Returns:
+ self
+ """
+
+ configuration_templates = config.get("configuration_templates")
+ if configuration_templates:
+ is_project_found = self.have_project.get("project_found")
+ if not is_project_found:
+ project_id, project_created = \
+ self.create_project_or_template(is_create_project=True)
+ if project_created:
+ self.log("project created with projectId: {0}".format(project_id), "DEBUG")
+ else:
+ self.status = "failed"
+ self.msg = "Project creation failed"
+ return self
+
+ is_template_found = self.have_template.get("template_found")
+ template_params = self.want.get("template_params")
+ self.log("Desired template details: {0}".format(template_params), "DEBUG")
+ self.log("Current template details: {0}".format(self.have_template), "DEBUG")
+ template_id = None
+ template_updated = False
+ self.validate_input_merge(is_template_found).check_return_status()
+ if is_template_found:
+ if self.requires_update():
+ template_id = self.have_template.get("id")
+ template_params.update({"id": template_id})
+ self.log("Current State (have): {0}".format(self.have_template), "INFO")
+ self.log("Desired State (want): {0}".format(self.want), "INFO")
+ response = self.dnac_apply['exec'](
+ family="configuration_templates",
+ function="update_template",
+ params=template_params,
+ op_modifies=True,
+ )
+ template_updated = True
+ self.log("Updating existing template '{0}'."
+ .format(self.have_template.get("template").get("name")), "INFO")
+ else:
+ # Template does not need update
+ self.result.update({
+ 'response': self.have_template.get("template"),
+ 'msg': "Template does not need update"
+ })
+ self.status = "exited"
+ return self
+ else:
+ if template_params.get("name"):
+ template_id, template_updated = self.create_project_or_template()
+ else:
+ self.msg = "missing required arguments: template_name"
+ self.status = "failed"
+ return self
+
+ if template_updated:
+ # Template needs to be versioned
+ version_params = {
+ "comments": self.want.get("comments"),
+ "templateId": template_id
+ }
+ response = self.dnac_apply['exec'](
family="configuration_templates",
- function="update_template",
- params=self.want_create.get("template_params"),
+ function="version_template",
op_modifies=True,
+ params=version_params
)
- template_updated = True
- template_id = self.have_create.get("templateId")
-
- if self.log:
- log("Updating Existing Template")
- else:
- # Template does not need update
- self.result['response'] = self.have_create.get("template")
- self.result['msg'] = "Template does not need update"
- self.module.exit_json(**self.result)
- else:
- response = self.dnac._exec(
- family="configuration_templates",
- function='create_template',
- op_modifies=True,
- params=self.want_create.get("template_params"),
- )
-
- if self.log:
- log("Template created. Get template_id for versioning")
- if isinstance(response, dict):
- create_error = False
- task_details = {}
task_id = response.get("response").get("taskId")
-
- if task_id:
- while (True):
- task_details = self.get_task_details(task_id)
- if task_details and task_details.get("isError"):
- create_error = True
- break
-
- if task_details and ("Successfully created template" in task_details.get("progress")):
- break
- if not create_error:
- template_id = task_details.get("data")
- if template_id:
- template_created = True
-
- if template_updated or template_created:
- # Template needs to be versioned
- version_params = dict(
- comments=self.want_create.get("comments"),
- templateId=template_id
- )
- response = self.dnac._exec(
- family="configuration_templates",
- function='version_template',
- op_modifies=True,
- params=version_params
- )
- task_details = {}
- task_id = response.get("response").get("taskId")
-
- if task_id:
+ if not task_id:
+ self.msg = "Task id: {0} not found".format(task_id)
+ self.status = "failed"
+ return self
task_details = self.get_task_details(task_id)
self.result['changed'] = True
self.result['msg'] = task_details.get('progress')
- self.result['diff'] = self.validated
- if self.log:
- log(str(task_details))
+ self.result['diff'] = config.get("configuration_templates")
+ self.log("Task details for 'version_template': {0}".format(task_details), "DEBUG")
+ self.result['response'] = task_details if task_details else response
+
+ if not self.result.get('msg'):
+ self.msg = "Error while versioning the template"
+ self.status = "failed"
+ return self
+
+ def handle_export(self, config):
+ """
+ Export templates and projects in DNAC with fields provided in DNAC.
+
+ Parameters:
+ config (dict) - Playbook details containing template information.
+
+ Returns:
+ self
+ """
+
+ export = config.get("export")
+ if export:
+ export_project = export.get("project")
+ self.log("Export project playbook details: {0}"
+ .format(export_project), "DEBUG")
+ if export_project:
+ response = self.dnac._exec(
+ family="configuration_templates",
+ function='export_projects',
+ params={"payload": export_project},
+ )
+ validation_string = "successfully exported project"
+ self.check_task_response_status(response,
+ validation_string,
+ True).check_return_status()
+ self.result['response'][0].update({"exportProject": self.msg})
+
+ export_values = export.get("template")
+ if export_values:
+ self.get_export_template_values(export_values).check_return_status()
+ self.log("Exporting template playbook details: {0}"
+ .format(self.export_template), "DEBUG")
+ response = self.dnac._exec(
+ family="configuration_templates",
+ function='export_templates',
+ params={"payload": self.export_template},
+ )
+ validation_string = "successfully exported template"
+ self.check_task_response_status(response,
+ validation_string,
+ True).check_return_status()
+ self.result['response'][0].update({"exportTemplate": self.msg})
+
+ return self
+
+ def handle_import(self, config):
+ """
+ Import templates and projects in DNAC with fields provided in DNAC.
+
+ Parameters:
+ config (dict) - Playbook details containing template information.
+
+ Returns:
+ self
+ """
+
+ _import = config.get("import")
+ if _import:
+ # _import_project = _import.get("project")
+ do_version = _import.get("project").get("do_version")
+ payload = None
+ if _import.get("project").get("payload"):
+ payload = _import.get("project").get("payload")
+ else:
+ self.msg = "Mandatory parameter payload is not found under import project"
+ self.status = "failed"
+ return self
+ _import_project = {
+ "doVersion": do_version,
+ # "payload": "{0}".format(payload)
+ "payload": payload
+ }
+ self.log("Importing project details from the playbook: {0}"
+ .format(_import_project), "DEBUG")
+ if _import_project:
+ response = self.dnac._exec(
+ family="configuration_templates",
+ function='imports_the_projects_provided',
+ params=_import_project,
+ )
+ validation_string = "successfully imported project"
+ self.check_task_response_status(response, validation_string).check_return_status()
+ self.result['response'][0].update({"importProject": validation_string})
+
+ _import_template = _import.get("template")
+ if _import_template.get("project_name"):
+ self.msg = "Mandatory paramter project_name is not found under import template"
+ self.status = "failed"
+ return self
+ if _import_template.get("payload"):
+ self.msg = "Mandatory paramter payload is not found under import template"
+ self.status = "failed"
+ return self
+
+ payload = _import_template.get("project_name")
+ import_template = {
+ "doVersion": _import_template.get("do_version"),
+ "projectName": _import_template.get("project_name"),
+ "payload": self.get_template_params(payload)
+ }
+ self.log("Import template details from the playbook: {0}"
+ .format(_import_template), "DEBUG")
+ if _import_template:
+ response = self.dnac._exec(
+ family="configuration_templates",
+ function='imports_the_templates_provided',
+ params=import_template,
+ )
+ validation_string = "successfully imported template"
+ self.check_task_response_status(response, validation_string).check_return_status()
+ self.result['response'][0].update({"importTemplate": validation_string})
+
+ return self
+
+ def get_diff_merged(self, config):
+ """
+ Update/Create templates and projects in DNAC with fields provided in DNAC.
+        Export the templates and projects.
+ Import the templates and projects.
+ Check using check_return_status().
+
+ Parameters:
+ config (dict) - Playbook details containing template information.
+
+ Returns:
+ self
+ """
+
+ self.update_configuration_templates(config)
+ if self.status == "failed":
+ return self
+
+ self.handle_export(config)
+ if self.status == "failed":
+ return self
+
+ self.handle_import(config)
+ if self.status == "failed":
+ return self
+
+ self.msg = "Successfully completed merged state execution"
+ self.status = "success"
+ return self
+
+ def delete_project_or_template(self, config, is_delete_project=False):
+ """
+ Call DNAC API to delete project or template with provided inputs.
+
+ Parameters:
+ config (dict) - Playbook details containing template information.
+ is_delete_project (bool) - True if we need to delete project, else False.
+
+ Returns:
+ self
+ """
+
+ if is_delete_project:
+ params_key = {"project_id": self.have_project.get("id")}
+ deletion_value = "deletes_the_project"
+ name = "project: {0}".format(config.get("configuration_templates").get('project_name'))
+ else:
+ template_params = self.want.get("template_params")
+ params_key = {"template_id": self.have_template.get("id")}
+ deletion_value = "deletes_the_template"
+ name = "templateName: {0}".format(template_params.get('templateName'))
+
+ response = self.dnac_apply['exec'](
+ family="configuration_templates",
+ function=deletion_value,
+ params=params_key,
+ )
+ task_id = response.get("response").get("taskId")
+ if task_id:
+ task_details = self.get_task_details(task_id)
+ self.result['changed'] = True
+ self.result['msg'] = task_details.get('progress')
+ self.result['diff'] = config.get("configuration_templates")
+
+ self.log("Task details for '{0}': {1}".format(deletion_value, task_details), "DEBUG")
self.result['response'] = task_details if task_details else response
-
- if not self.result.get('msg'):
- self.result['msg'] = "Error while versioning the template"
-
- def get_diff_delete(self):
- template_exists = self.have_create.get("template_found")
-
- if template_exists:
- response = self.dnac._exec(
+ if not self.result['msg']:
+ self.result['msg'] = "Error while deleting {name} : "
+ self.status = "failed"
+ return self
+
+ self.msg = "Successfully deleted {0} ".format(name)
+ self.status = "success"
+ return self
+
+ def get_diff_deleted(self, config):
+ """
+ Delete projects or templates in DNAC with fields provided in playbook.
+
+ Parameters:
+ config (dict) - Playbook details containing template information.
+
+ Returns:
+ self
+ """
+
+ configuration_templates = config.get("configuration_templates")
+ if configuration_templates:
+ is_project_found = self.have_project.get("project_found")
+ projectName = config.get("configuration_templates").get("project_name")
+
+ if not is_project_found:
+ self.msg = "Project {0} is not found".format(projectName)
+ self.status = "failed"
+ return self
+
+ is_template_found = self.have_template.get("template_found")
+ template_params = self.want.get("template_params")
+ templateName = config.get("configuration_templates").get("template_name")
+ if template_params.get("name"):
+ if is_template_found:
+ self.delete_project_or_template(config)
+ else:
+ self.msg = "Invalid template {0} under project".format(templateName)
+ self.status = "failed"
+ return self
+ else:
+ self.log("Template name is empty, deleting the project '{0}' and "
+ "associated templates"
+ .format(config.get("configuration_templates").get("project_name")), "INFO")
+ is_project_deletable = self.have_project.get("isDeletable")
+ if is_project_deletable:
+ self.delete_project_or_template(config, is_delete_project=True)
+ else:
+ self.msg = "Project is not deletable"
+ self.status = "failed"
+ return self
+
+ self.msg = "Successfully completed delete state execution"
+ self.status = "success"
+ return self
+
+ def verify_diff_merged(self, config):
+ """
+ Validating the DNAC configuration with the playbook details
+ when state is merged (Create/Update).
+
+ Parameters:
+            config (dict) - Playbook details containing the
+                configuration template information.
+
+ Returns:
+ self
+ """
+
+ if config.get("configuration_templates") is not None:
+ is_template_available = self.get_have_project(config)
+ self.log("Template availability: {0}".format(is_template_available), "INFO")
+ if not is_template_available:
+ self.msg = "Configuration Template config is not applied to the DNAC."
+ self.status = "failed"
+ return self
+
+ self.get_have_template(config, is_template_available)
+ self.log("Current State (have): {0}".format(self.want.get("template_params")), "INFO")
+ self.log("Desired State (want): {0}".format(self.have_template.get("template")), "INFO")
+ template_params = ["language", "name", "projectName", "softwareType",
+ "softwareVariant", "templateContent"]
+ for item in template_params:
+ if self.have_template.get("template").get(item) != self.want.get("template_params").get(item):
+ self.msg = " Configuration Template config is not applied to the DNAC."
+ self.status = "failed"
+ return self
+ self.log("Successfully validated the Template in the Catalyst Center.", "INFO")
+ self.result.get("response").update({"Validation": "Success"})
+
+ self.msg = "Successfully validated the Configuration Templates."
+ self.status = "success"
+ return self
+
+ def verify_diff_deleted(self, config):
+ """
+ Validating the DNAC configuration with the playbook details
+ when state is deleted (delete).
+
+ Parameters:
+            config (dict) - Playbook details containing the
+                configuration template information.
+
+ Returns:
+ self
+ """
+
+ if config.get("configuration_templates") is not None:
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.log("Desired State (want): {0}".format(self.want), "INFO")
+ template_list = self.dnac_apply['exec'](
family="configuration_templates",
- function="deletes_the_template",
- params={"template_id": self.have_create.get("templateId")},
+ function="gets_the_templates_available",
+ params={"projectNames": config.get("projectName")},
)
- task_details = {}
- task_id = response.get("response").get("taskId")
+ if template_list and isinstance(template_list, list):
+ templateName = config.get("configuration_templates").get("template_name")
+ template_info = get_dict_result(template_list,
+ "name",
+ templateName)
+ if template_info:
+ self.msg = "Configuration Template config is not applied to the DNAC."
+ self.status = "failed"
+ return self
- if task_id:
- task_details = self.get_task_details(task_id)
- self.result['changed'] = True
- self.result['msg'] = task_details.get('progress')
- self.result['diff'] = self.validated
+ self.log("Successfully validated absence of template in the Catalyst Center.", "INFO")
+ self.result.get("response").update({"Validation": "Success"})
- if self.log:
- log(str(task_details))
+ self.msg = "Successfully validated the absence of Template in the DNAC."
+ self.status = "success"
+ return self
- self.result['response'] = task_details if task_details else response
+ def reset_values(self):
+ """
+        Reset all necessary attributes to default values.
- if not self.result['msg']:
- self.result['msg'] = "Error while deleting template"
- else:
- self.module.fail_json(msg="Template not found", response=[])
+ Parameters:
+ self - The current object.
+
+ Returns:
+ None
+ """
+
+ self.have_project.clear()
+ self.have_template.clear()
+ self.want.clear()
def main():
- """ main entry point for module execution
- """
-
- element_spec = dict(
- dnac_host=dict(required=True, type='str'),
- dnac_port=dict(type='str', default='443'),
- dnac_username=dict(type='str', default='admin', aliases=["user"]),
- dnac_password=dict(type='str', no_log=True),
- dnac_verify=dict(type='bool', default='True'),
- dnac_version=dict(type="str", default="2.2.3.3"),
- dnac_debug=dict(type='bool', default=False),
- dnac_log=dict(type='bool', default=False),
- validate_response_schema=dict(type="bool", default=True),
- config=dict(required=True, type='list', elements='dict'),
- state=dict(
- default='merged',
- choices=['merged', 'deleted']),
- )
+ """ main entry point for module execution"""
+
+ element_spec = {'dnac_host': {'required': True, 'type': 'str'},
+ 'dnac_port': {'type': 'str', 'default': '443'},
+ 'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
+ 'dnac_password': {'type': 'str', 'no_log': True},
+ 'dnac_verify': {'type': 'bool', 'default': 'True'},
+ 'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
+ 'dnac_debug': {'type': 'bool', 'default': False},
+ 'dnac_log': {'type': 'bool', 'default': False},
+ "dnac_log_level": {"type": 'str', "default": 'WARNING'},
+ "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
+ "dnac_log_append": {"type": 'bool', "default": True},
+ 'validate_response_schema': {'type': 'bool', 'default': True},
+ "config_verify": {"type": 'bool', "default": False},
+ 'dnac_api_task_timeout': {'type': 'int', "default": 1200},
+ 'dnac_task_poll_interval': {'type': 'int', "default": 2},
+ 'config': {'required': True, 'type': 'list', 'elements': 'dict'},
+ 'state': {'default': 'merged', 'choices': ['merged', 'deleted']}
+ }
module = AnsibleModule(argument_spec=element_spec,
supports_check_mode=False)
dnac_template = DnacTemplate(module)
- dnac_template.validate_input()
- state = dnac_template.get_state()
- dnac_template.get_have()
- dnac_template.get_want()
-
- if state == "merged":
- dnac_template.get_diff_merge()
-
- elif state == "deleted":
- dnac_template.get_diff_delete()
+ dnac_template.validate_input().check_return_status()
+ state = dnac_template.params.get("state")
+ config_verify = dnac_template.params.get("config_verify")
+ if state not in dnac_template.supported_states:
+ dnac_template.status = "invalid"
+ dnac_template.msg = "State {0} is invalid".format(state)
+ dnac_template.check_return_status()
+
+ for config in dnac_template.validated_config:
+ dnac_template.reset_values()
+ dnac_template.get_have(config).check_return_status()
+ dnac_template.get_want(config).check_return_status()
+ dnac_template.get_diff_state_apply[state](config).check_return_status()
+ if config_verify:
+ dnac_template.verify_diff_state_apply[state](config).check_return_status()
module.exit_json(**dnac_template.result)
diff --git a/ansible_collections/cisco/dnac/plugins/modules/template_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/template_workflow_manager.py
new file mode 100644
index 000000000..13e81da9a
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/template_workflow_manager.py
@@ -0,0 +1,2885 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+"""Ansible module to perform operations on project and templates in Cisco Catalyst Center."""
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ['Madhan Sankaranarayanan, Rishita Chowdhary, Akash Bhaskaran, Muthu Rakesh']
+
+DOCUMENTATION = r"""
+---
+module: template_workflow_manager
+short_description: Resource module for Template functions
+description:
+- Manage operations create, update and delete of the resource Configuration Template.
+- API to create a template by project name and template name.
+- API to update a template by template name and project name.
+- API to delete a template by template name and project name.
+- API to export the projects for given projectNames.
+- API to export the templates for given templateIds.
+- API to manage operation create of the resource Configuration Template Import Project.
+- API to manage operation create of the resource Configuration Template Import Template.
+version_added: '6.6.0'
+extends_documentation_fragment:
+ - cisco.dnac.workflow_manager_params
+author: Madhan Sankaranarayanan (@madhansansel)
+ Rishita Chowdhary (@rishitachowdhary)
+ Akash Bhaskaran (@akabhask)
+ Muthu Rakesh (@MUTHU-RAKESH-27)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Cisco Catalyst Center after module completion.
+ type: str
+ choices: [ merged, deleted ]
+ default: merged
+ config:
+ description:
+ - List of details of templates being managed.
+ type: list
+ elements: dict
+ required: true
+ suboptions:
+ configuration_templates:
+ description: Create/Update/Delete template.
+ type: dict
+ suboptions:
+ author:
+ description: Author of template.
+ type: str
+ composite:
+ description: Is it composite template.
+ type: bool
+ containing_templates:
+ description: Configuration Template Create's containingTemplates.
+ suboptions:
+ composite:
+ description: Is it composite template.
+ type: bool
+ description:
+ description: Description of template.
+ type: str
+ device_types:
+ description: deviceTypes on which templates would be applied.
+ type: list
+ elements: dict
+ suboptions:
+ product_family:
+ description: Device family.
+ type: str
+ product_series:
+ description: Device series.
+ type: str
+ product_type:
+ description: Device type.
+ type: str
+ id:
+ description: UUID of template.
+ type: str
+ language:
+ description: Template language
+ choices:
+ - JINJA
+ - VELOCITY
+ type: str
+ name:
+ description: Name of template.
+ type: str
+ project_name:
+ description: Name of the project under which templates are managed.
+ type: str
+ project_description:
+ description: Description of the project created.
+ type: str
+ rollback_template_params:
+ description: Params required for template rollback.
+ type: list
+ elements: dict
+ suboptions:
+ binding:
+ description: Bind to source.
+ type: str
+ custom_order:
+ description: CustomOrder of template param.
+ type: int
+ data_type:
+ description: Datatype of template param.
+ type: str
+ default_value:
+ description: Default value of template param.
+ type: str
+ description:
+ description: Description of template param.
+ type: str
+ display_name:
+ description: Display name of param.
+ type: str
+ group:
+ description: Group.
+ type: str
+ id:
+ description: UUID of template param.
+ type: str
+ instruction_text:
+ description: Instruction text for param.
+ type: str
+ key:
+ description: Key.
+ type: str
+ not_param:
+ description: Is it not a variable.
+ type: bool
+ order:
+ description: Order of template param.
+ type: int
+ param_array:
+ description: Is it an array.
+ type: bool
+ parameter_name:
+ description: Name of template param.
+ type: str
+ provider:
+ description: Provider.
+ type: str
+ range:
+ description: Configuration Template Create's range.
+ type: list
+ elements: dict
+ suboptions:
+ id:
+ description: UUID of range.
+ type: str
+ max_value:
+ description: Max value of range.
+ type: int
+ min_value:
+ description: Min value of range.
+ type: int
+ required:
+ description: Is param required.
+ type: bool
+ selection:
+ description: Configuration Template Create's selection.
+ suboptions:
+ default_selected_values:
+ description: Default selection values.
+ elements: str
+ type: list
+ id:
+ description: UUID of selection.
+ type: str
+ selection_type:
+ description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ type: str
+ selection_values:
+ description: Selection values.
+ type: dict
+ type: dict
+ tags:
+ description: Configuration Template Create's tags.
+ suboptions:
+ id:
+ description: UUID of tag.
+ type: str
+ name:
+ description: Name of tag.
+ type: str
+ type: list
+ elements: dict
+ template_content:
+ description: Template content.
+ type: str
+ template_params:
+ description: Configuration Template Create's templateParams.
+ elements: dict
+ suboptions:
+ binding:
+ description: Bind to source.
+ type: str
+ custom_order:
+ description: CustomOrder of template param.
+ type: int
+ data_type:
+ description: Datatype of template param.
+ type: str
+ default_value:
+ description: Default value of template param.
+ type: str
+ description:
+ description: Description of template param.
+ type: str
+ display_name:
+ description: Display name of param.
+ type: str
+ group:
+ description: Group.
+ type: str
+ id:
+ description: UUID of template param.
+ type: str
+ instruction_text:
+ description: Instruction text for param.
+ type: str
+ key:
+ description: Key.
+ type: str
+ not_param:
+ description: Is it not a variable.
+ type: bool
+ order:
+ description: Order of template param.
+ type: int
+ param_array:
+ description: Is it an array.
+ type: bool
+ parameter_name:
+ description: Name of template param.
+ type: str
+ provider:
+ description: Provider.
+ type: str
+ range:
+ description: Configuration Template Create's range.
+ suboptions:
+ id:
+ description: UUID of range.
+ type: str
+ max_value:
+ description: Max value of range.
+ type: int
+ min_value:
+ description: Min value of range.
+ type: int
+ type: list
+ elements: dict
+ required:
+ description: Is param required.
+ type: bool
+ selection:
+ description: Configuration Template Create's selection.
+ suboptions:
+ default_selected_values:
+ description: Default selection values.
+ elements: str
+ type: list
+ id:
+ description: UUID of selection.
+ type: str
+ selection_type:
+ description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ type: str
+ selection_values:
+ description: Selection values.
+ type: dict
+ type: dict
+ type: list
+ version:
+ description: Current version of template.
+ type: str
+ type: list
+ elements: dict
+ create_time:
+ description: Create time of template.
+ type: int
+ custom_params_order:
+ description: Custom Params Order.
+ type: bool
+ template_description:
+ description: Description of template.
+ type: str
+ device_types:
+ description: Configuration Template Create's deviceTypes. This field is mandatory to create a new template.
+ suboptions:
+ product_family:
+ description: Device family.
+ type: str
+ product_series:
+ description: Device series.
+ type: str
+ product_type:
+ description: Device type.
+ type: str
+ type: list
+ elements: dict
+ failure_policy:
+ description: Define failure policy if template provisioning fails.
+ type: str
+ id:
+ description: UUID of template.
+ type: str
+ language:
+ description: Template language
+ choices:
+ - JINJA
+ - VELOCITY
+ type: str
+ last_update_time:
+ description: Update time of template.
+ type: int
+ latest_version_time:
+ description: Latest versioned template time.
+ type: int
+ template_name:
+ description: Name of template. This field is mandatory to create a new template.
+ type: str
+ parent_template_id:
+ description: Parent templateID.
+ type: str
+ project_id:
+ description: Project UUID.
+ type: str
+ project_name:
+ description: Project name.
+ type: str
+ project_description:
+ description: Project Description.
+ type: str
+ rollback_template_content:
+ description: Rollback template content.
+ type: str
+ rollback_template_params:
+ description: Configuration Template Create's rollbackTemplateParams.
+ suboptions:
+ binding:
+ description: Bind to source.
+ type: str
+ custom_order:
+ description: CustomOrder of template param.
+ type: int
+ data_type:
+ description: Datatype of template param.
+ type: str
+ default_value:
+ description: Default value of template param.
+ type: str
+ description:
+ description: Description of template param.
+ type: str
+ display_name:
+ description: Display name of param.
+ type: str
+ group:
+ description: Group.
+ type: str
+ id:
+ description: UUID of template param.
+ type: str
+ instruction_text:
+ description: Instruction text for param.
+ type: str
+ key:
+ description: Key.
+ type: str
+ not_param:
+ description: Is it not a variable.
+ type: bool
+ order:
+ description: Order of template param.
+ type: int
+ param_array:
+ description: Is it an array.
+ type: bool
+ parameter_name:
+ description: Name of template param.
+ type: str
+ provider:
+ description: Provider.
+ type: str
+ range:
+ description: Configuration Template Create's range.
+ suboptions:
+ id:
+ description: UUID of range.
+ type: str
+ max_value:
+ description: Max value of range.
+ type: int
+ min_value:
+ description: Min value of range.
+ type: int
+ type: list
+ elements: dict
+ required:
+ description: Is param required.
+ type: bool
+ selection:
+ description: Configuration Template Create's selection.
+ suboptions:
+ default_selected_values:
+ description: Default selection values.
+ elements: str
+ type: list
+ id:
+ description: UUID of selection.
+ type: str
+ selection_type:
+ description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ type: str
+ selection_values:
+ description: Selection values.
+ type: dict
+ type: dict
+ type: list
+ elements: dict
+ software_type:
+ description: Applicable device software type. This field is mandatory to create a new template.
+ type: str
+ software_variant:
+ description: Applicable device software variant.
+ type: str
+ software_version:
+ description: Applicable device software version.
+ type: str
+ template_tag:
+ description: Configuration Template Create's tags.
+ suboptions:
+ id:
+ description: UUID of tag.
+ type: str
+ name:
+ description: Name of tag.
+ type: str
+ type: list
+ elements: dict
+ template_content:
+ description: Template content.
+ type: str
+ template_params:
+ description: Configuration Template Create's templateParams.
+ suboptions:
+ binding:
+ description: Bind to source.
+ type: str
+ custom_order:
+ description: CustomOrder of template param.
+ type: int
+ data_type:
+ description: Datatype of template param.
+ type: str
+ default_value:
+ description: Default value of template param.
+ type: str
+ description:
+ description: Description of template param.
+ type: str
+ display_name:
+ description: Display name of param.
+ type: str
+ group:
+ description: Group.
+ type: str
+ id:
+ description: UUID of template param.
+ type: str
+ instruction_text:
+ description: Instruction text for param.
+ type: str
+ key:
+ description: Key.
+ type: str
+ not_param:
+ description: Is it not a variable.
+ type: bool
+ order:
+ description: Order of template param.
+ type: int
+ param_array:
+ description: Is it an array.
+ type: bool
+ parameter_name:
+ description: Name of template param.
+ type: str
+ provider:
+ description: Provider.
+ type: str
+ range:
+ description: Configuration Template Create's range.
+ suboptions:
+ id:
+ description: UUID of range.
+ type: str
+ max_value:
+ description: Max value of range.
+ type: int
+ min_value:
+ description: Min value of range.
+ type: int
+ type: list
+ elements: dict
+ required:
+ description: Is param required.
+ type: bool
+ selection:
+ description: Configuration Template Create's selection.
+ suboptions:
+ default_selected_values:
+ description: Default selection values.
+ elements: str
+ type: list
+ id:
+ description: UUID of selection.
+ type: str
+ selection_type:
+ description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ type: str
+ selection_values:
+ description: Selection values.
+ type: dict
+ type: dict
+ type: list
+ elements: dict
+ validation_errors:
+ description: Configuration Template Create's validationErrors.
+ suboptions:
+ rollback_template_errors:
+ description: Validation or design conflicts errors of rollback template.
+ elements: dict
+ type: list
+ template_errors:
+ description: Validation or design conflicts errors.
+ elements: dict
+ type: list
+ template_id:
+ description: UUID of template.
+ type: str
+ template_version:
+ description: Current version of template.
+ type: str
+ type: dict
+ version:
+ description: Current version of template.
+ type: str
+ version_description:
+ description: Template version comments.
+ type: str
+ export:
+ description: Export the project/template details.
+ type: dict
+ suboptions:
+ project:
+ description: Export the project.
+ type: list
+ elements: str
+ template:
+ description: Export the template.
+ type: list
+ elements: dict
+ suboptions:
+ project_name:
+ description: Name of the project under the template available.
+ type: str
+ template_name:
+ description: Name of the template which we need to export
+ type: str
+ import:
+ description: Import the project/template details.
+ type: dict
+ suboptions:
+ project:
+ description: Import the project details.
+ type: dict
+ suboptions:
+ do_version:
+        description: DoVersion query parameter. If this flag is true, creates a new
+          version of the template with the imported contents, if the templates already
+          exists. If false and if template already exists, then operation
+          fails with 'Template already exists' error.
+        type: bool
+ template:
+ description: Import the template details.
+ type: dict
+ suboptions:
+ do_version:
+        description: DoVersion query parameter. If this flag is true, creates a new
+          version of the template with the imported contents, if the templates already
+          exists. If false and if template already exists, then operation
+          fails with 'Template already exists' error.
+ type: bool
+ payload:
+ description: Configuration Template Import Template's payload.
+ elements: dict
+ suboptions:
+ author:
+ description: Author of template.
+ type: str
+ composite:
+ description: Is it composite template.
+ type: bool
+ containing_templates:
+ description: Configuration Template Import Template's containingTemplates.
+ elements: dict
+ suboptions:
+ composite:
+ description: Is it composite template.
+ type: bool
+ description:
+ description: Description of template.
+ type: str
+ device_types:
+ description: Configuration Template Import Template's deviceTypes.
+ elements: dict
+ suboptions:
+ product_family:
+ description: Device family.
+ type: str
+ product_series:
+ description: Device series.
+ type: str
+ product_type:
+ description: Device type.
+ type: str
+ type: list
+ id:
+ description: UUID of template.
+ type: str
+ language:
+ description: Template language (JINJA or VELOCITY).
+ type: str
+ name:
+ description: Name of template.
+ type: str
+ project_name:
+ description: Project name.
+ type: str
+ rollback_template_params:
+ description: Configuration Template Import Template's rollbackTemplateParams.
+ elements: dict
+ suboptions:
+ binding:
+ description: Bind to source.
+ type: str
+ custom_order:
+ description: CustomOrder of template param.
+ type: int
+ data_type:
+ description: Datatype of template param.
+ type: str
+ default_value:
+ description: Default value of template param.
+ type: str
+ description:
+ description: Description of template param.
+ type: str
+ display_name:
+ description: Display name of param.
+ type: str
+ group:
+ description: Group.
+ type: str
+ id:
+ description: UUID of template param.
+ type: str
+ instruction_text:
+ description: Instruction text for param.
+ type: str
+ key:
+ description: Key.
+ type: str
+ not_param:
+ description: Is it not a variable.
+ type: bool
+ order:
+ description: Order of template param.
+ type: int
+ param_array:
+ description: Is it an array.
+ type: bool
+ parameter_name:
+ description: Name of template param.
+ type: str
+ provider:
+ description: Provider.
+ type: str
+ range:
+ description: Configuration Template Import Template's range.
+ elements: dict
+ suboptions:
+ id:
+ description: UUID of range.
+ type: str
+ max_value:
+ description: Max value of range.
+ type: int
+ min_value:
+ description: Min value of range.
+ type: int
+ type: list
+ required:
+ description: Is param required.
+ type: bool
+ selection:
+ description: Configuration Template Import Template's selection.
+ suboptions:
+ default_selected_values:
+ description: Default selection values.
+ elements: str
+ type: list
+ id:
+ description: UUID of selection.
+ type: str
+ selection_type:
+ description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ type: str
+ selection_values:
+ description: Selection values.
+ type: dict
+ type: dict
+ type: list
+ tags:
+ description: Configuration Template Import Template's tags.
+ elements: dict
+ suboptions:
+ id:
+ description: UUID of tag.
+ type: str
+ name:
+ description: Name of tag.
+ type: str
+ type: list
+ template_content:
+ description: Template content.
+ type: str
+ template_params:
+ description: Configuration Template Import Template's templateParams.
+ elements: dict
+ suboptions:
+ binding:
+ description: Bind to source.
+ type: str
+ custom_order:
+ description: CustomOrder of template param.
+ type: int
+ data_type:
+ description: Datatype of template param.
+ type: str
+ default_value:
+ description: Default value of template param.
+ type: str
+ description:
+ description: Description of template param.
+ type: str
+ display_name:
+ description: Display name of param.
+ type: str
+ group:
+ description: Group.
+ type: str
+ id:
+ description: UUID of template param.
+ type: str
+ instruction_text:
+ description: Instruction text for param.
+ type: str
+ key:
+ description: Key.
+ type: str
+ not_param:
+ description: Is it not a variable.
+ type: bool
+ order:
+ description: Order of template param.
+ type: int
+ param_array:
+ description: Is it an array.
+ type: bool
+ parameter_name:
+ description: Name of template param.
+ type: str
+ provider:
+ description: Provider.
+ type: str
+ range:
+ description: Configuration Template Import Template's range.
+ elements: dict
+ suboptions:
+ id:
+ description: UUID of range.
+ type: str
+ max_value:
+ description: Max value of range.
+ type: int
+ min_value:
+ description: Min value of range.
+ type: int
+ type: list
+ required:
+ description: Is param required.
+ type: bool
+ selection:
+ description: Configuration Template Import Template's selection.
+ suboptions:
+ default_selected_values:
+ description: Default selection values.
+ elements: str
+ type: list
+ id:
+ description: UUID of selection.
+ type: str
+ selection_type:
+ description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ type: str
+ selection_values:
+ description: Selection values.
+ type: dict
+ type: dict
+ type: list
+ version:
+ description: Current version of template.
+ type: str
+ type: list
+ create_time:
+ description: Create time of template.
+ type: int
+ custom_params_order:
+ description: Custom Params Order.
+ type: bool
+ description:
+ description: Description of template.
+ type: str
+ device_types:
+ description: Configuration Template Import Template's deviceTypes.
+ elements: dict
+ suboptions:
+ product_family:
+ description: Device family.
+ type: str
+ product_series:
+ description: Device series.
+ type: str
+ product_type:
+ description: Device type.
+ type: str
+ type: list
+ failure_policy:
+ description: Define failure policy if template provisioning fails.
+ type: str
+ id:
+ description: UUID of template.
+ type: str
+ language:
+ description: Template language (JINJA or VELOCITY).
+ type: str
+ last_update_time:
+ description: Update time of template.
+ type: int
+ latest_version_time:
+ description: Latest versioned template time.
+ type: int
+ name:
+ description: Name of template.
+ type: str
+ parent_template_id:
+ description: Parent templateID.
+ type: str
+ project_id:
+ description: Project UUID.
+ type: str
+ project_name:
+ description: Project name.
+ type: str
+ rollback_template_content:
+ description: Rollback template content.
+ type: str
+ rollback_template_params:
+ description: Configuration Template Import Template's rollbackTemplateParams.
+ elements: dict
+ suboptions:
+ binding:
+ description: Bind to source.
+ type: str
+ custom_order:
+ description: CustomOrder of template param.
+ type: int
+ data_type:
+ description: Datatype of template param.
+ type: str
+ default_value:
+ description: Default value of template param.
+ type: str
+ description:
+ description: Description of template param.
+ type: str
+ display_name:
+ description: Display name of param.
+ type: str
+ group:
+ description: Group.
+ type: str
+ id:
+ description: UUID of template param.
+ type: str
+ instruction_text:
+ description: Instruction text for param.
+ type: str
+ key:
+ description: Key.
+ type: str
+ not_param:
+ description: Is it not a variable.
+ type: bool
+ order:
+ description: Order of template param.
+ type: int
+ param_array:
+ description: Is it an array.
+ type: bool
+ parameter_name:
+ description: Name of template param.
+ type: str
+ provider:
+ description: Provider.
+ type: str
+ range:
+ description: Configuration Template Import Template's range.
+ elements: dict
+ suboptions:
+ id:
+ description: UUID of range.
+ type: str
+ max_value:
+ description: Max value of range.
+ type: int
+ min_value:
+ description: Min value of range.
+ type: int
+ type: list
+ required:
+ description: Is param required.
+ type: bool
+ selection:
+ description: Configuration Template Import Template's selection.
+ suboptions:
+ default_selected_values:
+ description: Default selection values.
+ elements: str
+ type: list
+ id:
+ description: UUID of selection.
+ type: str
+ selection_type:
+ description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ type: str
+ selection_values:
+ description: Selection values.
+ type: dict
+ type: dict
+ type: list
+ software_type:
+ description: Applicable device software type.
+ type: str
+ software_variant:
+ description: Applicable device software variant.
+ type: str
+ software_version:
+ description: Applicable device software version.
+ type: str
+ tags:
+ description: Configuration Template Import Template's tags.
+ elements: dict
+ suboptions:
+ id:
+ description: UUID of tag.
+ type: str
+ name:
+ description: Name of tag.
+ type: str
+ type: list
+ template_content:
+ description: Template content.
+ type: str
+ template_params:
+ description: Configuration Template Import Template's templateParams.
+ elements: dict
+ suboptions:
+ binding:
+ description: Bind to source.
+ type: str
+ custom_order:
+ description: CustomOrder of template param.
+ type: int
+ data_type:
+ description: Datatype of template param.
+ type: str
+ default_value:
+ description: Default value of template param.
+ type: str
+ description:
+ description: Description of template param.
+ type: str
+ display_name:
+ description: Display name of param.
+ type: str
+ group:
+ description: Group.
+ type: str
+ id:
+ description: UUID of template param.
+ type: str
+ instruction_text:
+ description: Instruction text for param.
+ type: str
+ key:
+ description: Key.
+ type: str
+ not_param:
+ description: Is it not a variable.
+ type: bool
+ order:
+ description: Order of template param.
+ type: int
+ param_array:
+ description: Is it an array.
+ type: bool
+ parameter_name:
+ description: Name of template param.
+ type: str
+ provider:
+ description: Provider.
+ type: str
+ range:
+ description: Configuration Template Import Template's range.
+ elements: dict
+ suboptions:
+ id:
+ description: UUID of range.
+ type: str
+ max_value:
+ description: Max value of range.
+ type: int
+ min_value:
+ description: Min value of range.
+ type: int
+ type: list
+ required:
+ description: Is param required.
+ type: bool
+ selection:
+ description: Configuration Template Import Template's selection.
+ suboptions:
+ default_selected_values:
+ description: Default selection values.
+ elements: str
+ type: list
+ id:
+ description: UUID of selection.
+ type: str
+ selection_type:
+ description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
+ type: str
+ selection_values:
+ description: Selection values.
+ type: dict
+ type: dict
+ type: list
+ validation_errors:
+ description: Configuration Template Import Template's validationErrors.
+ suboptions:
+ rollback_template_errors:
+ description: Validation or design conflicts errors of rollback template.
+ type: dict
+ template_errors:
+ description: Validation or design conflicts errors.
+ type: dict
+ template_id:
+ description: UUID of template.
+ type: str
+ template_version:
+ description: Current version of template.
+ type: str
+ type: dict
+ version:
+ description: Current version of template.
+ type: str
+ type: list
+ project_name:
+ description: ProjectName path parameter. Project name to create template under the
+ project.
+ type: str
+
+requirements:
+- dnacentersdk == 2.4.5
+- python >= 3.5
+notes:
+ - SDK Method used are
+ configuration_templates.ConfigurationTemplates.create_template,
+ configuration_templates.ConfigurationTemplates.deletes_the_template,
+ configuration_templates.ConfigurationTemplates.update_template,
+ configuration_templates.ConfigurationTemplates.export_projects,
+ configuration_templates.ConfigurationTemplates.export_templates,
+ configuration_templates.ConfigurationTemplates.imports_the_projects_provided,
+ configuration_templates.ConfigurationTemplates.imports_the_templates_provided,
+
+ - Paths used are
+ post /dna/intent/api/v1/template-programmer/project/{projectId}/template,
+ delete /dna/intent/api/v1/template-programmer/template/{templateId},
+ put /dna/intent/api/v1/template-programmer/template,
+ post /dna/intent/api/v1/template-programmer/project/name/exportprojects,
+ post /dna/intent/api/v1/template-programmer/template/exporttemplates,
+ post /dna/intent/api/v1/template-programmer/project/importprojects,
+ post /dna/intent/api/v1/template-programmer/project/name/{projectName}/template/importtemplates,
+
+"""
+
+EXAMPLES = r"""
+- name: Create a new template.
+ cisco.dnac.template_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: merged
+ config_verify: True
+ config:
+ - configuration_templates:
+ author: string
+ composite: true
+ create_time: 0
+ custom_params_order: true
+ description: string
+ device_types:
+ - product_family: string
+ product_series: string
+ product_type: string
+ failure_policy: string
+ id: string
+ language: string
+ last_update_time: 0
+ latest_version_time: 0
+ name: string
+ parent_template_id: string
+ project_id: string
+ project_name: string
+ project_description: string
+ rollback_template_content: string
+ software_type: string
+ software_variant: string
+ software_version: string
+ tags:
+ - id: string
+ name: string
+ template_content: string
+ validation_errors:
+ rollback_template_errors:
+ - {}
+ template_errors:
+ - {}
+ template_id: string
+ template_version: string
+ version: string
+
+- name: Export the projects.
+ cisco.dnac.template_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: merged
+ config_verify: True
+ config:
+ export:
+ project:
+ - string
+ - string
+
+- name: Export the templates.
+ cisco.dnac.template_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: merged
+ config_verify: True
+ config:
+ export:
+ template:
+        - project_name: string
+ template_name: string
+ - project_name: string
+ template_name: string
+
+- name: Import the Projects.
+ cisco.dnac.template_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: merged
+ config_verify: True
+ config:
+ import:
+ project:
+ do_version: false
+ payload:
+ - name: string
+ - name: string
+
+"""
+
+RETURN = r"""
+# Case_1: Successful creation/updation/deletion of template/project
+response_1:
+ description: A dictionary with versioning details of the template as returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "endTime": 0,
+ "version": 0,
+ "data": String,
+ "startTime": 0,
+ "username": String,
+ "progress": String,
+ "serviceType": String, "rootId": String,
+ "isError": bool,
+ "instanceTenantId": String,
+ "id": String
+ "version": 0
+ },
+ "msg": String
+ }
+
+# Case_2: Error while deleting a template or when given project is not found
+response_2:
+ description: A list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: list
+ sample: >
+ {
+ "response": [],
+ "msg": String
+ }
+
+# Case_3: Given template already exists and requires no update
+response_3:
+  description: A dictionary with the existing template details as returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {},
+ "msg": String
+ }
+
+# Case_4: Given template list that needs to be exported
+response_4:
+ description: Details of the templates in the list as returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {},
+ "msg": String
+ }
+
+# Case_5: Given project list that needs to be exported
+response_5:
+ description: Details of the projects in the list as returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {},
+ "msg": String
+ }
+
+"""
+
+import copy
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts,
+ get_dict_result,
+ dnac_compare_equality,
+)
+
+
+class Template(DnacBase):
+ """Class containing member attributes for template_workflow_manager module"""
+
+    def __init__(self, module):
+        """Initialize template/project workflow state on top of the shared DnacBase setup."""
+        super().__init__(module)
+        # Cached current state of the project/template fetched from Catalyst Center.
+        self.have_project = {}
+        self.have_template = {}
+        # Only 'merged' (create/update) and 'deleted' states are handled by this module.
+        self.supported_states = ["merged", "deleted"]
+        self.accepted_languages = ["JINJA", "VELOCITY"]
+        self.export_template = []
+        # Seed the response list with an empty entry for this run's payload.
+        self.result['response'].append({})
+
+ def validate_input(self):
+ """
+ Validate the fields provided in the playbook.
+ Checks the configuration provided in the playbook against a predefined specification
+ to ensure it adheres to the expected structure and data types.
+ Parameters:
+ self: The instance of the class containing the 'config' attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call 'validate_input' on it.
+ If the validation succeeds, 'self.status' will be 'success' and 'self.validated_config'
+ will contain the validated configuration. If it fails, 'self.status' will be 'failed',
+ 'self.msg' will describe the validation issues.
+
+ """
+
+ if not self.config:
+ self.msg = "config not available in playbook for validattion"
+ self.status = "success"
+ return self
+
+ temp_spec = {
+ "configuration_templates": {
+ 'type': 'dict',
+ 'tags': {'type': 'list'},
+ 'author': {'type': 'str'},
+ 'composite': {'type': 'bool'},
+ 'containing_templates': {'type': 'list'},
+ 'create_time': {'type': 'int'},
+ 'custom_params_order': {'type': 'bool'},
+ 'description': {'type': 'str'},
+ 'device_types': {
+ 'type': 'list',
+ 'elements': 'dict',
+ 'product_family': {'type': 'str'},
+ 'product_series': {'type': 'str'},
+ 'product_type': {'type': 'str'},
+ },
+ 'failure_policy': {'type': 'str'},
+ 'id': {'type': 'str'},
+ 'language': {'type': 'str'},
+ 'last_update_time': {'type': 'int'},
+ 'latest_version_time': {'type': 'int'},
+ 'name': {'type': 'str'},
+ 'parent_template_id': {'type': 'str'},
+ 'project_id': {'type': 'str'},
+ 'project_name': {'type': 'str'},
+ 'project_description': {'type': 'str'},
+ 'rollback_template_content': {'type': 'str'},
+ 'rollback_template_params': {'type': 'list'},
+ 'software_type': {'type': 'str'},
+ 'software_variant': {'type': 'str'},
+ 'software_version': {'type': 'str'},
+ 'template_content': {'type': 'str'},
+ 'template_params': {'type': 'list'},
+ 'template_name': {'type': 'str'},
+ 'validation_errors': {'type': 'dict'},
+ 'version': {'type': 'str'},
+ 'version_description': {'type': 'str'}
+ },
+ 'export': {
+ 'type': 'dict',
+ 'project': {'type': 'list', 'elements': 'str'},
+ 'template': {
+ 'type': 'list',
+ 'elements': 'dict',
+ 'project_name': {'type': 'str'},
+ 'template_name': {'type': 'str'}
+ }
+ },
+ 'import': {
+ 'type': 'dict',
+ 'project': {
+ 'type': 'dict',
+ 'do_version': {'type': 'str', 'default': 'False'},
+ },
+ 'template': {
+ 'type': 'dict',
+ 'do_version': {'type': 'str', 'default': 'False'},
+ 'payload': {
+ 'type': 'list',
+ 'elements': 'dict',
+ 'tags': {'type': 'list'},
+ 'author': {'type': 'str'},
+ 'composite': {'type': 'bool'},
+ 'containing_templates': {'type': 'list'},
+ 'create_time': {'type': 'int'},
+ 'custom_params_order': {'type': 'bool'},
+ 'description': {'type': 'str'},
+ 'device_types': {
+ 'type': 'list',
+ 'elements': 'dict',
+ 'product_family': {'type': 'str'},
+ 'product_series': {'type': 'str'},
+ 'product_type': {'type': 'str'},
+ },
+ 'failure_policy': {'type': 'str'},
+ 'id': {'type': 'str'},
+ 'language': {'type': 'str'},
+ 'last_update_time': {'type': 'int'},
+ 'latest_version_time': {'type': 'int'},
+ 'name': {'type': 'str'},
+ 'parent_template_id': {'type': 'str'},
+ 'project_id': {'type': 'str'},
+ 'project_name': {'type': 'str'},
+ 'project_description': {'type': 'str'},
+ 'rollback_template_content': {'type': 'str'},
+ 'rollback_template_params': {'type': 'list'},
+ 'software_type': {'type': 'str'},
+ 'software_variant': {'type': 'str'},
+ 'software_version': {'type': 'str'},
+ 'template_content': {'type': 'str'},
+ 'template_params': {'type': 'list'},
+ 'template_name': {'type': 'str'},
+ 'validation_errors': {'type': 'dict'},
+ 'version': {'type': 'str'}
+ }
+ }
+ }
+ }
+ # Validate template params
+ self.config = self.camel_to_snake_case(self.config)
+ valid_temp, invalid_params = validate_list_of_dicts(
+ self.config, temp_spec
+ )
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format(
+ "\n".join(invalid_params))
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_temp
+ self.log("Successfully validated playbook config params: {0}".format(valid_temp), "INFO")
+ self.msg = "Successfully validated input"
+ self.status = "success"
+ return self
+
+ def get_project_params(self, params):
+ """
+ Store project parameters from the playbook for template processing in Cisco Catalyst Center.
+
+ Parameters:
+ params (dict) - Playbook details containing Project information.
+
+ Returns:
+ project_params (dict) - Organized Project parameters.
+ """
+
+ project_params = {"name": params.get("project_name"),
+ "description": params.get("project_description")
+ }
+ return project_params
+
+ def get_tags(self, _tags):
+ """
+ Store tags from the playbook for template processing in Cisco Catalyst Center.
+ Check using check_return_status()
+
+ Parameters:
+ tags (dict) - Tags details containing Template information.
+
+ Returns:
+ tags (dict) - Organized tags parameters.
+ """
+
+ if _tags is None:
+ return None
+
+ tags = []
+ i = 0
+ for item in _tags:
+ tags.append({})
+ id = item.get("id")
+ if id is not None:
+ tags[i].update({"id": id})
+
+ name = item.get("name")
+ if name is not None:
+ tags[i].update({"name": name})
+ else:
+ self.msg = "name is mandatory in tags in location " + str(i)
+ self.status = "failed"
+ return self.check_return_status()
+
+ return tags
+
+ def get_device_types(self, device_types):
+ """
+ Store device types parameters from the playbook for template processing in Cisco Catalyst Center.
+ Check using check_return_status()
+
+ Parameters:
+ device_types (dict) - Device types details containing Template information.
+
+ Returns:
+ deviceTypes (dict) - Organized device types parameters.
+ """
+
+ if device_types is None:
+ self.msg = "Mandatory parameter 'device_types' is required."
+ self.status = "failed"
+ return self.check_return_status()
+
+ deviceTypes = []
+ i = 0
+ for item in device_types:
+ deviceTypes.append({})
+ product_family = item.get("product_family")
+ if product_family is not None:
+ deviceTypes[i].update({"productFamily": product_family})
+ else:
+ self.msg = "product_family is mandatory for deviceTypes"
+ self.status = "failed"
+ return self.check_return_status()
+
+ product_series = item.get("product_series")
+ if product_series is not None:
+ deviceTypes[i].update({"productSeries": product_series})
+ product_type = item.get("product_type")
+ if product_type is not None:
+ deviceTypes[i].update({"productType": product_type})
+ i = i + 1
+
+ return deviceTypes
+
+ def get_validation_errors(self, validation_errors):
+ """
+ Store template parameters from the playbook for template processing in Cisco Catalyst Center.
+
+ Parameters:
+ validation_errors (dict) - Playbook details containing validation errors information.
+
+ Returns:
+ validationErrors (dict) - Organized validation errors parameters.
+ """
+
+ if validation_errors is None:
+ return None
+
+ validationErrors = {}
+ rollback_template_errors = validation_errors.get("rollback_template_errors")
+ if rollback_template_errors is not None:
+ validationErrors.update({
+ "rollbackTemplateErrors": rollback_template_errors
+ })
+
+ template_errors = validation_errors.get("template_errors")
+ if template_errors is not None:
+ validationErrors.update({
+ "templateErrors": template_errors
+ })
+
+ template_id = validation_errors.get("template_id")
+ if template_id is not None:
+ validationErrors.update({
+ "templateId": template_id
+ })
+
+ template_version = validation_errors.get("template_version")
+ if template_version is not None:
+ validationErrors.update({
+ "templateVersion": template_version
+ })
+
+ return validationErrors
+
+ def get_template_info(self, template_params):
+ """
+ Store template params from the playbook for template processing in Cisco Catalyst Center.
+ Check using check_return_status()
+
+ Parameters:
+ template_params (dict) - Playbook details containing template params information.
+
+ Returns:
+ templateParams (dict) - Organized template params parameters.
+ """
+
+ if template_params is None:
+ return None
+
+ templateParams = []
+ i = 0
+ self.log("Template params details: {0}".format(template_params), "DEBUG")
+ for item in template_params:
+ self.log("Template params items: {0}".format(item), "DEBUG")
+ templateParams.append({})
+ binding = item.get("binding")
+ if binding is not None:
+ templateParams[i].update({"binding": binding})
+
+ custom_order = item.get("custom_order")
+ if custom_order is not None:
+ templateParams[i].update({"customOrder": custom_order})
+
+ default_value = item.get("default_value")
+ if default_value is not None:
+ templateParams[i].update({"defaultValue": default_value})
+
+ description = item.get("description")
+ if description is not None:
+ templateParams[i].update({"description": description})
+
+ display_name = item.get("display_name")
+ if display_name is not None:
+ templateParams[i].update({"displayName": display_name})
+
+ group = item.get("group")
+ if group is not None:
+ templateParams[i].update({"group": group})
+
+ id = item.get("id")
+ if id is not None:
+ templateParams[i].update({"id": id})
+
+ instruction_text = item.get("instruction_text")
+ if instruction_text is not None:
+ templateParams[i].update({"instructionText": instruction_text})
+
+ key = item.get("key")
+ if key is not None:
+ templateParams[i].update({"key": key})
+
+ not_param = item.get("not_param")
+ if not_param is not None:
+ templateParams[i].update({"notParam": not_param})
+
+ order = item.get("order")
+ if order is not None:
+ templateParams[i].update({"order": order})
+
+ param_array = item.get("param_array")
+ if param_array is not None:
+ templateParams[i].update({"paramArray": param_array})
+
+ provider = item.get("provider")
+ if provider is not None:
+ templateParams[i].update({"provider": provider})
+
+ parameter_name = item.get("parameter_name")
+ if parameter_name is not None:
+ templateParams[i].update({"parameterName": parameter_name})
+ else:
+ self.msg = "parameter_name is mandatory for the template_params."
+ self.status = "failed"
+ return self.check_return_status()
+
+ data_type = item.get("data_type")
+ datatypes = ["STRING", "INTEGER", "IPADDRESS", "MACADDRESS", "SECTIONDIVIDER"]
+ if data_type is not None:
+ templateParams[i].update({"dataType": data_type})
+ else:
+ self.msg = "dataType is mandatory for the template_params."
+ self.status = "failed"
+ return self.check_return_status()
+ if data_type not in datatypes:
+ self.msg = "data_type under template_params should be in " + str(datatypes)
+ self.status = "failed"
+ return self.check_return_status()
+
+ required = item.get("required")
+ if required is not None:
+ templateParams[i].update({"required": required})
+
+ range = item.get("range")
+ self.log("Template params range list: {0}".format(range), "DEBUG")
+ if range is not None:
+ templateParams[i].update({"range": []})
+ _range = templateParams[i].get("range")
+ self.log("Template params range: {0}".format(_range), "DEBUG")
+ j = 0
+ for value in range:
+ _range.append({})
+ id = value.get("id")
+ if id is not None:
+ _range[j].update({"id": id})
+ max_value = value.get("max_value")
+ if max_value is not None:
+ _range[j].update({"maxValue": max_value})
+ else:
+ self.msg = "max_value is mandatory for range under template_params"
+ self.status = "failed"
+ return self.check_return_status()
+ min_value = value.get("min_value")
+ if min_value is not None:
+ _range[j].update({"maxValue": min_value})
+ else:
+ self.msg = "min_value is mandatory for range under template_params"
+ self.status = "failed"
+ return self.check_return_status()
+ j = j + 1
+
+ self.log("Template params details: {0}".format(templateParams), "DEBUG")
+ selection = item.get("selection")
+ self.log("Template params selection: {0}".format(selection), "DEBUG")
+ if selection is not None:
+ templateParams[i].update({"selection": {}})
+ _selection = templateParams[i].get("selection")
+ id = selection.get("id")
+ if id is not None:
+ _selection.update({"id": id})
+ default_selected_values = selection.get("default_selected_values")
+ if default_selected_values is not None:
+ _selection.update({"defaultSelectedValues": default_selected_values})
+ selection_values = selection.get("selection_values")
+ if selection_values is not None:
+ _selection.update({"selectionValues": selection_values})
+ selection_type = selection.get("selection_type")
+ if selection_type is not None:
+ _selection.update({"selectionType": selection_type})
+ i = i + 1
+
+ return templateParams
+
+ def get_containing_templates(self, containing_templates):
+ """
+ Store tags from the playbook for template processing in Cisco Catalyst Center.
+ Check using check_return_status()
+
+ Parameters:
+ containing_templates (dict) - Containing tempaltes details
+ containing Template information.
+
+ Returns:
+ containingTemplates (dict) - Organized containing templates parameters.
+ """
+
+ if containing_templates is None:
+ return None
+
+ containingTemplates = []
+ i = 0
+ for item in containing_templates:
+ containingTemplates.append({})
+ _tags = item.get("tags")
+ if _tags is not None:
+ containingTemplates[i].update({"tags": self.get_tags(_tags)})
+
+ composite = item.get("composite")
+ if composite is not None:
+ containingTemplates[i].update({"composite": composite})
+
+ description = item.get("description")
+ if description is not None:
+ containingTemplates[i].update({"description": description})
+
+ device_types = item.get("device_types")
+ if device_types is not None:
+ containingTemplates[i].update({
+ "deviceTypes": self.get_device_types(device_types)
+ })
+
+ id = item.get("id")
+ if id is not None:
+ containingTemplates[i].update({"id": id})
+
+ name = item.get("name")
+ if name is None:
+ self.msg = "name is mandatory under containing templates"
+ self.status = "failed"
+ return self.check_return_status()
+
+ containingTemplates[i].update({"name": name})
+
+ language = item.get("language")
+ if language is None:
+ self.msg = "language is mandatory under containing templates"
+ self.status = "failed"
+ return self.check_return_status()
+
+ language_list = ["JINJA", "VELOCITY"]
+ if language not in language_list:
+ self.msg = "language under containing templates should be in " + str(language_list)
+ self.status = "failed"
+ return self.check_return_status()
+
+ containingTemplates[i].update({"language": language})
+
+ project_name = item.get("project_name")
+ if project_name is not None:
+ containingTemplates[i].update({"projectName": project_name})
+ else:
+ self.msg = "project_name is mandatory under containing templates"
+ self.status = "failed"
+ return self.check_return_status()
+
+ rollback_template_params = item.get("rollback_template_params")
+ if rollback_template_params is not None:
+ containingTemplates[i].update({
+ "rollbackTemplateParams": self.get_template_info(rollback_template_params)
+ })
+
+ template_content = item.get("template_content")
+ if template_content is not None:
+ containingTemplates[i].update({"templateContent": template_content})
+
+ template_params = item.get("template_params")
+ if template_params is not None:
+ containingTemplates[i].update({
+ "templateParams": self.get_template_info(template_params)
+ })
+
+ version = item.get("version")
+ if version is not None:
+ containingTemplates[i].update({"version": version})
+
+ return containingTemplates
+
+ def get_template_params(self, params):
+ """
+ Store template parameters from the playbook for template processing in Cisco Catalyst Center.
+
+ Parameters:
+ params (dict) - Playbook details containing Template information.
+
+ Returns:
+ temp_params (dict) - Organized template parameters.
+ """
+
+ self.log("Template params playbook details: {0}".format(params), "DEBUG")
+ self.log(str(params))
+ temp_params = {
+ "tags": self.get_tags(params.get("template_tag")),
+ "author": params.get("author"),
+ "composite": params.get("composite"),
+ "containingTemplates":
+ self.get_containing_templates(params.get("containing_templates")),
+ "createTime": params.get("create_time"),
+ "customParamsOrder": params.get("custom_params_order"),
+ "description": params.get("template_description"),
+ "deviceTypes":
+ self.get_device_types(params.get("device_types")),
+ "failurePolicy": params.get("failure_policy"),
+ "id": params.get("id"),
+ "lastUpdateTime": params.get("last_update_time"),
+ "latestVersionTime": params.get("latest_version_time"),
+ "parentTemplateId": params.get("parent_template_id"),
+ "projectId": params.get("project_id"),
+ "rollbackTemplateContent": params.get("rollback_template_content"),
+ "rollbackTemplateParams":
+ self.get_template_info(params.get("rollback_template_params")),
+ "softwareVariant": params.get("software_variant"),
+ "softwareVersion": params.get("software_version"),
+ "templateContent": params.get("template_content"),
+ "templateParams":
+ self.get_template_info(params.get("template_params")),
+ "validationErrors":
+ self.get_validation_errors(params.get("validation_errors")),
+ "version": params.get("version"),
+ "project_id": params.get("project_id")
+ }
+ language = params.get("language")
+ if not language:
+ self.msg = "Mandatory parameter 'language' is required."
+ self.status = "failed"
+ return self.check_return_status()
+
+ language = language.upper()
+ language_list = ["JINJA", "VELOCITY"]
+ if language not in language_list:
+ self.msg = "language should be in '{0}'".format(language_list)
+ self.status = "failed"
+ return self.check_return_status()
+
+ temp_params.update({"language": language})
+
+ name = params.get("template_name")
+ if not name:
+ self.msg = "Mandatory parameter 'template_name' is required."
+ self.status = "failed"
+ return self.check_return_status()
+
+ temp_params.update({"name": name})
+
+ projectName = params.get("project_name")
+ if not projectName:
+ self.msg = "Mandatory parameter 'project_name' is required."
+ self.status = "failed"
+ return self.check_return_status()
+
+ temp_params.update({"project_name": projectName})
+
+ softwareType = params.get("software_type")
+ if not softwareType:
+ self.msg = "Mandatory parameter 'software_type' is required."
+ self.status = "failed"
+ return self.check_return_status()
+
+ temp_params.update({"softwareType": softwareType})
+
+ self.log("Formatted template params details: {0}".format(temp_params), "DEBUG")
+ copy_temp_params = copy.deepcopy(temp_params)
+ for item in copy_temp_params:
+ if temp_params[item] is None:
+ del temp_params[item]
+ self.log(str(temp_params))
+ return temp_params
+
+ def get_template(self, config):
+ """
+ Get the template needed for updation or creation.
+
+ Parameters:
+ config (dict) - Playbook details containing Template information.
+
+ Returns:
+ result (dict) - Template details for the given template ID.
+ """
+
+ result = None
+ items = self.dnac_apply['exec'](
+ family="configuration_templates",
+ function="get_template_details",
+ params={"template_id": config.get("templateId")}
+ )
+ if items:
+ result = items
+
+ self.log("Received API response from 'get_template_details': {0}".format(items), "DEBUG")
+ self.result['response'] = items
+ return result
+
+ def get_have_project(self, config):
+ """
+ Get the current project related information from Cisco Catalyst Center.
+
+ Parameters:
+ config (dict) - Playbook details containing Project information.
+
+ Returns:
+ template_available (list) - Current project information.
+ """
+
+ have_project = {}
+ given_projectName = config.get("configuration_templates").get("project_name")
+ template_available = None
+
+ # Check if project exists.
+ project_details = self.get_project_details(given_projectName)
+ # Cisco Catalyst Center returns project details even if the substring matches.
+ # Hence check the projectName retrieved from Cisco Catalyst Center.
+ if not (project_details and isinstance(project_details, list)):
+ self.log("Project: {0} not found, need to create new project in Cisco Catalyst Center"
+ .format(given_projectName), "INFO")
+ return None
+
+ fetched_projectName = project_details[0].get('name')
+ if fetched_projectName != given_projectName:
+ self.log("Project {0} provided is not exact match in Cisco Catalyst Center DB"
+ .format(given_projectName), "INFO")
+ return None
+
+ template_available = project_details[0].get('templates')
+ have_project["project_found"] = True
+ have_project["id"] = project_details[0].get("id")
+ have_project["isDeletable"] = project_details[0].get("isDeletable")
+
+ self.have_project = have_project
+ return template_available
+
+ def get_have_template(self, config, template_available):
+ """
+ Get the current template related information from Cisco Catalyst Center.
+
+ Parameters:
+ config (dict) - Playbook details containing Template information.
+ template_available (list) - Current project information.
+
+ Returns:
+ self
+ """
+
+ projectName = config.get("configuration_templates").get("project_name")
+ templateName = config.get("configuration_templates").get("template_name")
+ template = None
+ have_template = {}
+
+ have_template["isCommitPending"] = False
+ have_template["template_found"] = False
+
+ template_details = get_dict_result(template_available,
+ "name",
+ templateName)
+ # Check if specified template in playbook is available
+ if not template_details:
+ self.log("Template {0} not found in project {1}"
+ .format(templateName, projectName), "INFO")
+ self.msg = "Template : {0} missing, new template to be created".format(templateName)
+ self.status = "success"
+ return self
+
+ config["templateId"] = template_details.get("id")
+ have_template["id"] = template_details.get("id")
+ # Get available templates which are committed under the project
+ template_list = self.dnac_apply['exec'](
+ family="configuration_templates",
+ function="gets_the_templates_available",
+ params={"projectNames": config.get("projectName")},
+ )
+ have_template["isCommitPending"] = True
+ # This check will fail if specified template is there not committed in Cisco Catalyst Center
+ if template_list and isinstance(template_list, list):
+ template_info = get_dict_result(template_list,
+ "name",
+ templateName)
+ if template_info:
+ template = self.get_template(config)
+ have_template["template"] = template
+ have_template["isCommitPending"] = False
+ have_template["template_found"] = template is not None \
+ and isinstance(template, dict)
+ self.log("Template {0} is found and template "
+ "details are :{1}".format(templateName, str(template)), "INFO")
+
+ # There are committed templates in the project but the
+ # one specified in the playbook may not be committed
+ self.log("Commit pending for template name {0}"
+ " is {1}".format(templateName, have_template.get('isCommitPending')), "INFO")
+
+ self.have_template = have_template
+ self.msg = "Successfully collected all template parameters from Cisco Catalyst Center for comparison"
+ self.status = "success"
+ return self
+
+ def get_have(self, config):
+ """
+ Get the current project and template details from Cisco Catalyst Center.
+
+ Parameters:
+ config (dict) - Playbook details containing Project/Template information.
+
+ Returns:
+ self
+ """
+ configuration_templates = config.get("configuration_templates")
+ if configuration_templates:
+ if not configuration_templates.get("project_name"):
+ self.msg = "Mandatory Parameter project_name not available"
+ self.status = "failed"
+ return self
+ template_available = self.get_have_project(config)
+ if template_available:
+ self.get_have_template(config, template_available)
+
+ self.msg = "Successfully collected all project and template \
+ parameters from Cisco Catalyst Center for comparison"
+ self.status = "success"
+ return self
+
+ def get_project_details(self, projectName):
+ """
+ Get the details of specific project name provided.
+
+ Parameters:
+ projectName (str) - Project Name
+
+ Returns:
+ items (dict) - Project details with given project name.
+ """
+
+ items = self.dnac_apply['exec'](
+ family="configuration_templates",
+ function='get_projects',
+ op_modifies=True,
+ params={"name": projectName},
+ )
+ return items
+
+ def get_want(self, config):
+ """
+ Get all the template and project related information from playbook
+ that is needed to be created in Cisco Catalyst Center.
+
+ Parameters:
+ config (dict) - Playbook details.
+
+ Returns:
+ self
+ """
+
+ want = {}
+ configuration_templates = config.get("configuration_templates")
+ self.log("Playbook details: {0}".format(config), "INFO")
+ if configuration_templates:
+ template_params = self.get_template_params(configuration_templates)
+ project_params = self.get_project_params(configuration_templates)
+ version_comments = configuration_templates.get("version_description")
+
+ if self.params.get("state") == "merged":
+ self.update_mandatory_parameters(template_params)
+
+ want["template_params"] = template_params
+ want["project_params"] = project_params
+ want["comments"] = version_comments
+
+ self.want = want
+ self.msg = "Successfully collected all parameters from playbook " + \
+ "for comparison"
+ self.status = "success"
+ return self
+
    def create_project_or_template(self, is_create_project=False):
        """
        Call Cisco Catalyst Center API to create project or template based on the input provided.

        Submits a create_project/create_template request, then polls the
        resulting task until its progress contains the expected success
        string and an ID can be extracted from the task data.

        Parameters:
            is_create_project (bool) - True to create a project, False (default)
            to create a template from self.want["template_params"].

        Returns:
            creation_id (str) - ID of the created project/template, or None on failure.
            created (bool) - True if the creation succeeded, else False.
        """

        creation_id = None
        created = False
        self.log("Desired State (want): {0}".format(self.want), "INFO")
        template_params = self.want.get("template_params")
        project_params = self.want.get("project_params")

        # Select API function, payload and expected task progress message.
        if is_create_project:
            params_key = project_params
            name = "project: {0}".format(project_params.get('name'))
            validation_string = "Successfully created project"
            creation_value = "create_project"
        else:
            params_key = template_params
            name = "template: {0}".format(template_params.get('name'))
            validation_string = "Successfully created template"
            creation_value = "create_template"

        response = self.dnac_apply['exec'](
            family="configuration_templates",
            function=creation_value,
            op_modifies=True,
            params=params_key,
        )
        if not isinstance(response, dict):
            self.log("Response of '{0}' is not in dictionary format."
                     .format(creation_value), "CRITICAL")
            return creation_id, created

        task_id = response.get("response").get("taskId")
        if not task_id:
            self.log("Task id {0} not found for '{1}'.".format(task_id, creation_value), "CRITICAL")
            return creation_id, created

        # Poll the task until it reports success or an error.
        # NOTE(review): this loop has no retry limit or sleep — if the task
        # never reports the validation string nor an error it spins forever;
        # confirm the task API guarantees a terminal state.
        while not created:
            task_details = self.get_task_details(task_id)
            if not task_details:
                self.log("Failed to get task details of '{0}' for taskid: {1}"
                         .format(creation_value, task_id), "CRITICAL")
                return creation_id, created

            self.log("Task details for {0}: {1}".format(creation_value, task_details), "DEBUG")
            if task_details.get("isError"):
                self.log("Error occurred for '{0}' with taskid: {1}"
                         .format(creation_value, task_id), "ERROR")
                return creation_id, created

            if validation_string not in task_details.get("progress"):
                self.log("'{0}' progress set to {1} for taskid: {2}"
                         .format(creation_value, task_details.get('progress'), task_id), "DEBUG")
                continue

            # The task 'data' is either the ID itself or a JSON string
            # containing a 'templateId' (check_string_dictionary decides).
            task_details_data = task_details.get("data")
            value = self.check_string_dictionary(task_details_data)
            if value is None:
                creation_id = task_details.get("data")
            else:
                creation_id = value.get("templateId")
            if not creation_id:
                self.log("Export data is not found for '{0}' with taskid : {1}"
                         .format(creation_value, task_id), "DEBUG")
                continue

            created = True
            if is_create_project:
                # ProjectId is required for creating a new template.
                # Store it with other template parameters.
                template_params["projectId"] = creation_id
                template_params["project_id"] = creation_id

        self.log("New {0} created with id {1}".format(name, creation_id), "DEBUG")
        return creation_id, created
+
+ def requires_update(self):
+ """
+ Check if the template config given requires update.
+
+ Parameters:
+ self - Current object.
+
+ Returns:
+ bool - True if any parameter specified in obj_params differs between
+ current_obj and requested_obj, indicating that an update is required.
+ False if all specified parameters are equal.
+ """
+
+ if self.have_template.get("isCommitPending"):
+ self.log("Template '{0}' is in saved state and needs to be updated and committed."
+ .format(self.have_template.get("template").get("name")), "DEBUG")
+ return True
+
+ current_obj = self.have_template.get("template")
+ requested_obj = self.want.get("template_params")
+ self.log("Current State (have): {0}".format(current_obj), "INFO")
+ self.log("Desired State (want): {0}".format(requested_obj), "INFO")
+ obj_params = [
+ ("tags", "tags", ""),
+ ("author", "author", ""),
+ ("composite", "composite", False),
+ ("containingTemplates", "containingTemplates", []),
+ ("createTime", "createTime", ""),
+ ("customParamsOrder", "customParamsOrder", False),
+ ("description", "description", ""),
+ ("deviceTypes", "deviceTypes", []),
+ ("failurePolicy", "failurePolicy", ""),
+ ("id", "id", ""),
+ ("language", "language", "VELOCITY"),
+ ("lastUpdateTime", "lastUpdateTime", ""),
+ ("latestVersionTime", "latestVersionTime", ""),
+ ("name", "name", ""),
+ ("parentTemplateId", "parentTemplateId", ""),
+ ("projectId", "projectId", ""),
+ ("projectName", "projectName", ""),
+ ("rollbackTemplateContent", "rollbackTemplateContent", ""),
+ ("rollbackTemplateParams", "rollbackTemplateParams", []),
+ ("softwareType", "softwareType", ""),
+ ("softwareVariant", "softwareVariant", ""),
+ ("softwareVersion", "softwareVersion", ""),
+ ("templateContent", "templateContent", ""),
+ ("templateParams", "templateParams", []),
+ ("validationErrors", "validationErrors", {}),
+ ("version", "version", ""),
+ ]
+
+ return any(not dnac_compare_equality(current_obj.get(dnac_param, default),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param, default) in obj_params)
+
+ def update_mandatory_parameters(self, template_params):
+ """
+ Update parameters which are mandatory for creating a template.
+
+ Parameters:
+ template_params (dict) - Template information.
+
+ Returns:
+ None
+ """
+
+ # Mandate fields required for creating a new template.
+ # Store it with other template parameters.
+ self.log(str(template_params))
+ self.log(str(self.have_project))
+ template_params["projectId"] = self.have_project.get("id")
+ template_params["project_id"] = self.have_project.get("id")
+ # Update language,deviceTypes and softwareType if not provided for existing template.
+ if not template_params.get("language"):
+ template_params["language"] = self.have_template.get('template') \
+ .get('language')
+ if not template_params.get("deviceTypes"):
+ template_params["deviceTypes"] = self.have_template.get('template') \
+ .get('deviceTypes')
+ if not template_params.get("softwareType"):
+ template_params["softwareType"] = self.have_template.get('template') \
+ .get('softwareType')
+
+ def validate_input_merge(self, template_exists):
+ """
+ Validate input after getting all the parameters from Cisco Catalyst Center.
+ "If mandate like deviceTypes, softwareType and language "
+ "already present in Cisco Catalyst Center for a template."
+ "It is not required to be provided in playbook, "
+ "but if it is new creation error will be thrown to provide these fields.
+
+ Parameters:
+ template_exists (bool) - True if template exists, else False.
+
+ Returns:
+ None
+ """
+
+ template_params = self.want.get("template_params")
+ language = template_params.get("language").upper()
+ if language:
+ if language not in self.accepted_languages:
+ self.msg = "Invalid value language {0} ." \
+ "Accepted language values are {1}" \
+ .format(self.accepted_languages, language)
+ self.status = "failed"
+ return self
+ else:
+ template_params["language"] = "JINJA"
+
+ if not template_exists:
+ if not template_params.get("deviceTypes") \
+ or not template_params.get("softwareType"):
+ self.msg = "DeviceTypes and SoftwareType are required arguments to create Templates"
+ self.status = "failed"
+ return self
+
+ self.msg = "Input validated for merging"
+ self.status = "success"
+ return self
+
+ def get_export_template_values(self, export_values):
+ """
+ Get the export template values from the details provided by the playbook.
+
+ Parameters:
+ export_values (bool) - All the template available under the project.
+
+ Returns:
+ self
+ """
+
+ template_details = self.dnac._exec(
+ family="configuration_templates",
+ function='get_projects_details'
+ )
+ for values in export_values:
+ project_name = values.get("project_name")
+ self.log("Project name for export template: {0}".format(project_name), "DEBUG")
+ template_details = template_details.get("response")
+ self.log("Template details: {0}".format(template_details), "DEBUG")
+ all_template_details = get_dict_result(template_details,
+ "name",
+ project_name)
+ self.log("Template details under the project name {0}: {1}"
+ .format(project_name, all_template_details), "DEBUG")
+ all_template_details = all_template_details.get("templates")
+ self.log("Template details under the project name {0}: {1}"
+ .format(project_name, all_template_details), "DEBUG")
+ template_name = values.get("template_name")
+ template_detail = get_dict_result(all_template_details,
+ "name",
+ template_name)
+ self.log("Template details with template name {0}: {1}"
+ .format(template_name, template_detail), "DEBUG")
+ if template_detail is None:
+ self.msg = "Invalid project_name and template_name in export"
+ self.status = "failed"
+ return self
+ self.export_template.append(template_detail.get("id"))
+
+ self.msg = "Successfully collected the export template IDs"
+ self.status = "success"
+ return self
+
    def update_configuration_templates(self, configuration_templates):
        """
        Update/Create templates and projects in CCC with fields provided in Cisco Catalyst Center.

        Creates the project if it does not exist, then creates or updates the
        template, and finally versions (commits) the template when anything
        was created/updated.

        Parameters:
            configuration_templates (dict) - Playbook details containing template information.

        Returns:
            self - on failure or when no update is needed; implicitly returns
            None when versioning succeeds (callers check self.status instead
            of the return value).
        """

        # Create the project first when it does not exist yet.
        is_project_found = self.have_project.get("project_found")
        if not is_project_found:
            project_id, project_created = \
                self.create_project_or_template(is_create_project=True)
            if not project_created:
                self.status = "failed"
                self.msg = "Project creation failed"
                return self

            self.log("project created with projectId: {0}".format(project_id), "DEBUG")

        is_template_found = self.have_template.get("template_found")
        template_params = self.want.get("template_params")
        self.log("Desired template details: {0}".format(template_params), "DEBUG")
        self.log("Current template details: {0}".format(self.have_template), "DEBUG")
        template_id = None
        template_updated = False
        self.validate_input_merge(is_template_found).check_return_status()
        if is_template_found:
            if not self.requires_update():
                # Template does not need update
                self.result.update({
                    'response': self.have_template.get("template"),
                    'msg': "Template does not need update"
                })
                self.status = "exited"
                return self

            # Existing template differs (or is uncommitted): push an update.
            template_id = self.have_template.get("id")
            template_params.update({"id": template_id})
            self.log("Current State (have): {0}".format(self.have_template), "INFO")
            self.log("Desired State (want): {0}".format(self.want), "INFO")
            response = self.dnac_apply['exec'](
                family="configuration_templates",
                function="update_template",
                params=template_params,
                op_modifies=True,
            )
            template_updated = True
            self.log("Updating existing template '{0}'."
                     .format(self.have_template.get("template").get("name")), "INFO")

        else:
            # Template does not exist yet: create it.
            if not template_params.get("name"):
                self.msg = "missing required arguments: template_name"
                self.status = "failed"
                return self
            template_id, template_updated = self.create_project_or_template()

        if template_updated:
            # Template needs to be versioned
            version_params = {
                "comments": self.want.get("comments"),
                "templateId": template_id
            }
            response = self.dnac_apply['exec'](
                family="configuration_templates",
                function="version_template",
                op_modifies=True,
                params=version_params
            )
            task_id = response.get("response").get("taskId")
            if not task_id:
                self.msg = "Task id: {0} not found".format(task_id)
                self.status = "failed"
                return self
            task_details = self.get_task_details(task_id)
            self.result['changed'] = True
            self.result['msg'] = task_details.get('progress')
            self.result['diff'] = configuration_templates
            self.log("Task details for 'version_template': {0}".format(task_details), "DEBUG")
            self.result['response'] = task_details if task_details else response

            # An empty task progress message indicates versioning failed.
            # NOTE(review): there is no 'return self' on the success path —
            # callers appear to rely on self.status only; confirm.
            if not self.result.get('msg'):
                self.msg = "Error while versioning the template"
                self.status = "failed"
                return self
+
+ def handle_export(self, export):
+ """
+ Export templates and projects in CCC with fields provided in Cisco Catalyst Center.
+
+ Parameters:
+ export (dict) - Playbook details containing export project/template information.
+
+ Returns:
+ self
+ """
+
+ export_project = export.get("project")
+ self.log("Export project playbook details: {0}"
+ .format(export_project), "DEBUG")
+ if export_project:
+ response = self.dnac._exec(
+ family="configuration_templates",
+ function='export_projects',
+ params={
+ "payload": export_project,
+ "active_validation": False,
+ },
+ )
+ validation_string = "successfully exported project"
+ self.check_task_response_status(response,
+ validation_string,
+ True).check_return_status()
+ self.result['response'][0].update({"exportProject": self.msg})
+
+ export_values = export.get("template")
+ if export_values:
+ self.get_export_template_values(export_values).check_return_status()
+ self.log("Exporting template playbook details: {0}"
+ .format(self.export_template), "DEBUG")
+ response = self.dnac._exec(
+ family="configuration_templates",
+ function='export_templates',
+ params={
+ "payload": self.export_template,
+ "active_validation": False,
+ },
+ )
+ validation_string = "successfully exported template"
+ self.check_task_response_status(response,
+ validation_string,
+ True).check_return_status()
+ self.result['response'][0].update({"exportTemplate": self.msg})
+
+ return self
+
def handle_import(self, _import):
    """
    Import projects and/or templates into Cisco Catalyst Center with the
    details provided in the playbook.

    Parameters:
        _import (dict) - Playbook details containing import project/template
            information under the optional keys "project" and "template".

    Returns:
        self - Current object; self.status/self.msg reflect the outcome.
    """

    _import_project = _import.get("project")
    if _import_project:
        # Default do_version to False when absent or falsy.
        do_version = _import_project.get("do_version") or False
        payload = _import_project.get("payload")
        if not payload:
            self.msg = "Mandatory parameter payload is not found under import project"
            self.status = "failed"
            return self

        # Import only the projects that do not already exist in Catalyst Center.
        final_payload = []
        for item in payload:
            if self.get_project_details(item.get("name")) == []:
                final_payload.append(item)

        if final_payload:
            import_project_params = {
                "do_version": do_version,
                "payload": final_payload,
                "active_validation": False,
            }
            self.log("Importing project details from the playbook: {0}"
                     .format(import_project_params), "DEBUG")
            response = self.dnac._exec(
                family="configuration_templates",
                function='imports_the_projects_provided',
                params=import_project_params,
            )
            validation_string = "successfully imported project"
            self.check_task_response_status(response, validation_string).check_return_status()
            self.result['response'][0].update({"importProject": validation_string})
        else:
            self.msg = "Projects '{0}' already available.".format(payload)
            self.result['response'][0].update({
                "importProject": "Projects '{0}' already available.".format(payload)
            })

    _import_template = _import.get("template")
    if _import_template:
        # Default do_version to False when absent or falsy.
        do_version = _import_template.get("do_version") or False
        if not _import_template.get("project_name"):
            self.msg = "Mandatory parameter project_name is not found under import template"
            self.status = "failed"
            return self

        payload = _import_template.get("payload")
        if not payload:
            self.msg = "Mandatory parameter payload is not found under import template"
            self.status = "failed"
            return self

        # Normalize every playbook template entry to the API parameter names.
        final_payload = []
        for item in payload:
            self.log(str(item))
            final_payload.append(self.get_template_params(item))
        self.log(str(final_payload))
        import_template = {
            # Bug fix: the payload previously used the raw playbook value
            # (possibly None) instead of the False default computed above.
            "do_version": do_version,
            "project_name": _import_template.get("project_name"),
            "payload": final_payload,
            "active_validation": False,
        }
        self.log("Import template details from the playbook: {0}"
                 .format(_import_template), "DEBUG")
        response = self.dnac._exec(
            family="configuration_templates",
            function='imports_the_templates_provided',
            params=import_template
        )
        validation_string = "successfully imported template"
        self.check_task_response_status(response, validation_string).check_return_status()
        self.result['response'][0].update({"importTemplate": validation_string})

    return self
+
def get_diff_merged(self, config):
    """
    Apply the merged state: create/update configuration templates, then
    export and import templates/projects as requested in the playbook.
    Each step is skipped when its config key is absent, and execution
    stops at the first failed step (checked via self.status).

    Parameters:
        config (dict) - Playbook details containing template information.

    Returns:
        self
    """

    # (config key, handler) pairs executed in playbook order.
    steps = (
        ("configuration_templates", self.update_configuration_templates),
        ("export", self.handle_export),
        ("import", self.handle_import),
    )
    for key, handler in steps:
        details = config.get(key)
        if details:
            handler(details)
            if self.status == "failed":
                return self

    self.msg = "Successfully completed merged state execution"
    self.status = "success"
    return self
+
def delete_project_or_template(self, config, is_delete_project=False):
    """
    Call the Cisco Catalyst Center API to delete a project or a template.

    Parameters:
        config (dict) - Playbook details containing template information.
        is_delete_project (bool) - True to delete the whole project,
            False (default) to delete the single template.

    Returns:
        self - self.status/self.msg reflect the outcome of the deletion task.
    """

    if is_delete_project:
        params_key = {"project_id": self.have_project.get("id")}
        deletion_value = "deletes_the_project"
        name = "project: {0}".format(config.get("configuration_templates").get('project_name'))
    else:
        template_params = self.want.get("template_params")
        params_key = {"template_id": self.have_template.get("id")}
        deletion_value = "deletes_the_template"
        name = "templateName: {0}".format(template_params.get('templateName'))

    response = self.dnac_apply['exec'](
        family="configuration_templates",
        function=deletion_value,
        params=params_key,
    )
    task_id = response.get("response").get("taskId")
    if task_id:
        task_details = self.get_task_details(task_id)
        self.result['changed'] = True
        self.result['msg'] = task_details.get('progress')
        self.result['diff'] = config.get("configuration_templates")

        self.log("Task details for '{0}': {1}".format(deletion_value, task_details), "DEBUG")
        self.result['response'] = task_details if task_details else response
        if not self.result['msg']:
            # Bug fix: "{name}" was emitted literally because the string was
            # never formatted; substitute the actual project/template name.
            self.result['msg'] = "Error while deleting {0} : ".format(name)
            self.status = "failed"
            return self

    self.msg = "Successfully deleted {0} ".format(name)
    self.status = "success"
    return self
+
def get_diff_deleted(self, config):
    """
    Delete projects or templates in Cisco Catalyst Center with fields provided in playbook.

    When a template name is present in the desired state, only that template
    is deleted; when it is absent, the whole project (and its associated
    templates) is deleted, provided Catalyst Center marks it deletable.

    Parameters:
        config (dict) - Playbook details containing template information.

    Returns:
        self
    """

    configuration_templates = config.get("configuration_templates")
    if configuration_templates:
        is_project_found = self.have_project.get("project_found")
        projectName = config.get("configuration_templates").get("project_name")

        # Nothing under a non-existent project can be deleted.
        if not is_project_found:
            self.msg = "Project {0} is not found".format(projectName)
            self.status = "failed"
            return self

        is_template_found = self.have_template.get("template_found")
        template_params = self.want.get("template_params")
        templateName = config.get("configuration_templates").get("template_name")
        if template_params.get("name"):
            # A template name was supplied: delete just that template,
            # which must already exist in Catalyst Center.
            if is_template_found:
                self.delete_project_or_template(config)
            else:
                self.msg = "Invalid template {0} under project".format(templateName)
                self.status = "failed"
                return self
        else:
            # No template name: fall back to deleting the entire project.
            self.log("Template name is empty, deleting the project '{0}' and "
                     "associated templates"
                     .format(config.get("configuration_templates").get("project_name")), "INFO")
            is_project_deletable = self.have_project.get("isDeletable")
            if is_project_deletable:
                self.delete_project_or_template(config, is_delete_project=True)
            else:
                self.msg = "Project is not deletable"
                self.status = "failed"
                return self

    self.msg = "Successfully completed delete state execution"
    self.status = "success"
    return self
+
def verify_diff_merged(self, config):
    """
    Validate the Cisco Catalyst Center configuration against the playbook
    details when state is merged (create/update): the template fetched from
    Catalyst Center must match the desired template parameters.

    Parameters:
        config (dict) - Playbook details containing template information.

    Returns:
        self
    """

    if config.get("configuration_templates") is not None:
        is_template_available = self.get_have_project(config)
        self.log("Template availability: {0}".format(is_template_available), "INFO")
        if not is_template_available:
            self.msg = "Configuration Template config is not applied to the Cisco Catalyst Center."
            self.status = "failed"
            return self

        self.get_have_template(config, is_template_available)
        # Bug fix: the have/want values were swapped between these two logs.
        self.log("Current State (have): {0}".format(self.have_template.get("template")), "INFO")
        self.log("Desired State (want): {0}".format(self.want.get("template_params")), "INFO")
        # Fields that must match between Catalyst Center and the playbook.
        template_params = ["language", "name", "projectName", "softwareType",
                           "softwareVariant", "templateContent"]
        for item in template_params:
            if self.have_template.get("template").get(item) != self.want.get("template_params").get(item):
                self.msg = " Configuration Template config is not applied to the Cisco Catalyst Center."
                self.status = "failed"
                return self
        self.log("Successfully validated the Template in the Catalyst Center.", "INFO")
        self.result.get("response").update({"Validation": "Success"})

    self.msg = "Successfully validated the Configuration Templates."
    self.status = "success"
    return self
+
def verify_diff_deleted(self, config):
    """
    Validate that the template is absent from Cisco Catalyst Center when
    state is deleted: query the templates of the project named in the
    playbook and fail if the template is still listed.

    Parameters:
        config (dict) - Playbook details containing template information.

    Returns:
        self
    """

    if config.get("configuration_templates") is not None:
        self.log("Current State (have): {0}".format(self.have), "INFO")
        self.log("Desired State (want): {0}".format(self.want), "INFO")
        # Bug fix: the project name lives under "configuration_templates";
        # config.get("projectName") was always None, so the API was queried
        # with no project filter.
        template_list = self.dnac_apply['exec'](
            family="configuration_templates",
            function="gets_the_templates_available",
            params={"projectNames": config.get("configuration_templates").get("project_name")},
        )
        if template_list and isinstance(template_list, list):
            templateName = config.get("configuration_templates").get("template_name")
            # The deleted template must no longer appear in the project.
            template_info = get_dict_result(template_list,
                                            "name",
                                            templateName)
            if template_info:
                self.msg = "Configuration Template config is not applied to the Cisco Catalyst Center."
                self.status = "failed"
                return self

        self.log("Successfully validated absence of template in the Catalyst Center.", "INFO")
        self.result.get("response").update({"Validation": "Success"})

    self.msg = "Successfully validated the absence of Template in the Cisco Catalyst Center."
    self.status = "success"
    return self
+
def reset_values(self):
    """
    Restore the per-config working state to its defaults.

    Clears the cached current-state dictionaries (have_project,
    have_template) and the desired state (want) so the next playbook
    config entry starts from a clean slate.

    Parameters:
        self - The current object.

    Returns:
        None
    """

    for state_store in (self.have_project, self.have_template, self.want):
        state_store.clear()
+
+
def main():
    """ main entry point for module execution"""

    # Argument spec: Catalyst Center connection/logging options shared by the
    # collection's workflow modules, plus this module's 'config' (list of
    # template/project definitions) and 'state' (merged or deleted).
    element_spec = {'dnac_host': {'required': True, 'type': 'str'},
                    'dnac_port': {'type': 'str', 'default': '443'},
                    'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
                    'dnac_password': {'type': 'str', 'no_log': True},
                    'dnac_verify': {'type': 'bool', 'default': 'True'},
                    'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
                    'dnac_debug': {'type': 'bool', 'default': False},
                    'dnac_log': {'type': 'bool', 'default': False},
                    "dnac_log_level": {"type": 'str', "default": 'WARNING'},
                    "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
                    "dnac_log_append": {"type": 'bool', "default": True},
                    'validate_response_schema': {'type': 'bool', 'default': True},
                    "config_verify": {"type": 'bool', "default": False},
                    'dnac_api_task_timeout': {'type': 'int', "default": 1200},
                    'dnac_task_poll_interval': {'type': 'int', "default": 2},
                    'config': {'required': True, 'type': 'list', 'elements': 'dict'},
                    'state': {'default': 'merged', 'choices': ['merged', 'deleted']}
                    }
    module = AnsibleModule(argument_spec=element_spec,
                           supports_check_mode=False)
    ccc_template = Template(module)
    # Validate the playbook input before doing any API work.
    ccc_template.validate_input().check_return_status()
    state = ccc_template.params.get("state")
    config_verify = ccc_template.params.get("config_verify")
    if state not in ccc_template.supported_states:
        ccc_template.status = "invalid"
        ccc_template.msg = "State {0} is invalid".format(state)
        ccc_template.check_return_status()

    # Process each config entry independently: gather current state (have),
    # desired state (want), apply the requested state, and optionally verify.
    for config in ccc_template.validated_config:
        ccc_template.reset_values()
        ccc_template.get_have(config).check_return_status()
        ccc_template.get_want(config).check_return_status()
        ccc_template.get_diff_state_apply[state](config).check_return_status()
        if config_verify:
            ccc_template.verify_diff_state_apply[state](config).check_return_status()

    module.exit_json(**ccc_template.result)
diff --git a/ansible_collections/cisco/dnac/plugins/modules/user.py b/ansible_collections/cisco/dnac/plugins/modules/user.py
index 5bfdc9ced..21769bb83 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/user.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/user.py
@@ -51,8 +51,8 @@ seealso:
link: https://developer.cisco.com/docs/dna-center/#!update-user-api
notes:
- SDK Method used are
- userand_roles.UserandRoles.add_user_ap_i,
- userand_roles.UserandRoles.update_user_ap_i,
+ user_and_roles.UserandRoles.add_user_ap_i,
+ user_and_roles.UserandRoles.update_user_ap_i,
- Paths used are
post /dna/system/api/v1/user,
diff --git a/ansible_collections/cisco/dnac/plugins/modules/user_info.py b/ansible_collections/cisco/dnac/plugins/modules/user_info.py
index bb25c6659..f00e1d66e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/user_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/user_info.py
@@ -32,7 +32,7 @@ seealso:
link: https://developer.cisco.com/docs/dna-center/#!get-users-api
notes:
- SDK Method used are
- userand_roles.UserandRoles.get_users_ap_i,
+ user_and_roles.UserandRoles.get_users_api,
- Paths used are
get /dna/system/api/v1/user,
diff --git a/ansible_collections/cisco/dnac/plugins/modules/users_external_servers_info.py b/ansible_collections/cisco/dnac/plugins/modules/users_external_servers_info.py
index 2360060ad..536f958c8 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/users_external_servers_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/users_external_servers_info.py
@@ -32,7 +32,7 @@ seealso:
link: https://developer.cisco.com/docs/dna-center/#!get-external-authentication-servers-api
notes:
- SDK Method used are
- userand_roles.UserandRoles.get_external_authentication_servers_ap_i,
+ user_and_roles.UserandRoles.get_external_authentication_servers_ap_i,
- Paths used are
get /dna/system/api/v1/users/external-servers,
diff --git a/ansible_collections/cisco/dnac/plugins/plugin_utils/dnac.py b/ansible_collections/cisco/dnac/plugins/plugin_utils/dnac.py
index 80afbe39a..7448c4c50 100644
--- a/ansible_collections/cisco/dnac/plugins/plugin_utils/dnac.py
+++ b/ansible_collections/cisco/dnac/plugins/plugin_utils/dnac.py
@@ -248,7 +248,7 @@ class DNACSDK(object):
return self.result
def verify_array(self, verify_interface, **kwargs):
- if type(verify_interface) is None:
+ if verify_interface is None:
return list()
if isinstance(verify_interface, list):
diff --git a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.10.txt b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.10.txt
index 4cd6816af..199ea4ac4 100644
--- a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.10.txt
+++ b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.10.txt
@@ -352,6 +352,7 @@ plugins/action/syslog_config_create.py compile-2.6!skip # Python 2.6 is not supp
plugins/action/syslog_config_update.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/action/transit_peer_network.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/action/transit_peer_network_info.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/template_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/site_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
@@ -706,17 +707,36 @@ plugins/action/syslog_config_create.py compile-2.7!skip # Python 2.7 is not supp
plugins/action/syslog_config_update.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/action/transit_peer_network.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/action/transit_peer_network_info.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/site_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/module_utils/dnac.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/site_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/template_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/module_utils/dnac.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/site_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
diff --git a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.11.txt b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.11.txt
index c18cef239..91834bb50 100644
--- a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.11.txt
+++ b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.11.txt
@@ -354,6 +354,7 @@ plugins/action/syslog_config_create.py compile-2.6!skip # Python 2.6 is not supp
plugins/action/syslog_config_update.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/action/transit_peer_network.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/action/transit_peer_network_info.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/template_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/site_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
@@ -708,6 +709,7 @@ plugins/action/syslog_config_create.py compile-2.7!skip # Python 2.7 is not supp
plugins/action/syslog_config_update.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/action/transit_peer_network.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/action/transit_peer_network_info.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/site_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
@@ -1062,12 +1064,30 @@ plugins/action/syslog_config_create.py import-2.7 # Python 2.7 is not supported
plugins/action/syslog_config_update.py import-2.7 # Python 2.7 is not supported by the DNA Center SDK
plugins/action/transit_peer_network.py import-2.7 # Python 2.7 is not supported by the DNA Center SDK
plugins/action/transit_peer_network_info.py import-2.7 # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/site_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/module_utils/dnac.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/site_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/template_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
diff --git a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.12.txt b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.12.txt
index 4c6b7ecdb..41aebb287 100644
--- a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.12.txt
+++ b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.12.txt
@@ -2,19 +2,39 @@ plugins/module_utils/dnac.py compile-2.6!skip # Python 2.6 is not supported by t
plugins/module_utils/dnac.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/module_utils/dnac.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/module_utils/dnac.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/template_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/site_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/site_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/template_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/site_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/site_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
-plugins/modules/swim_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK \ No newline at end of file
+plugins/modules/swim_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
diff --git a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.13.txt b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.13.txt
index 1ef6913d1..29449f7cb 100644
--- a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.13.txt
+++ b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.13.txt
@@ -1,10 +1,20 @@
plugins/module_utils/dnac.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/module_utils/dnac.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/site_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/site_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
-plugins/modules/swim_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK \ No newline at end of file
+plugins/modules/swim_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
diff --git a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.14.txt b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.14.txt
index 1ef6913d1..29449f7cb 100644
--- a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.14.txt
+++ b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.14.txt
@@ -1,10 +1,20 @@
plugins/module_utils/dnac.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/module_utils/dnac.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/site_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/site_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
-plugins/modules/swim_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK \ No newline at end of file
+plugins/modules/swim_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
diff --git a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.15.txt b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.15.txt
index 20e7272b2..29449f7cb 100644
--- a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.15.txt
+++ b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.15.txt
@@ -1,37 +1,20 @@
plugins/module_utils/dnac.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/module_utils/dnac.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/site_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/site_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
-plugins/action/business_sda_hostonboarding_ssid_ippool.py pylint:unused-import
-plugins/action/path_trace.py pylint:unused-import
-plugins/action/pnp_global_settings.py pylint:unused-import
-plugins/action/sda_fabric.py pylint:unused-import
-plugins/action/sda_fabric_authentication_profile.py pylint:unused-import
-plugins/action/sda_fabric_border_device.py pylint:unused-import
-plugins/action/sda_fabric_control_plane_device.py pylint:unused-import
-plugins/action/sda_fabric_edge_device.py pylint:unused-import
-plugins/action/sda_fabric_site.py pylint:unused-import
-plugins/action/sda_multicast.py pylint:unused-import
-plugins/action/sda_port_assignment_for_access_point.py pylint:unused-import
-plugins/action/sda_port_assignment_for_user_device.py pylint:unused-import
-plugins/action/sda_provision_device.py pylint:unused-import
-plugins/action/sda_virtual_network.py pylint:unused-import
-plugins/action/sda_virtual_network_ip_pool.py pylint:unused-import
-plugins/action/sda_virtual_network_v2.py pylint:unused-import
-plugins/action/transit_peer_network.py pylint:unused-import
-plugins/action/wireless_profile.py pylint:unused-import
-plugins/module_utils/dnac.py pylint:unused-import
-plugins/modules/pnp_intent.py pylint:unused-import
-plugins/modules/swim_intent.py pylint:unused-import
-plugins/modules/template_intent.py pylint:unused-import
-plugins/plugin_utils/dnac.py pylint:unused-import
-tests/unit/modules/dnac/test_pnp_intent.py pylint:unused-import
-tests/unit/modules/dnac/test_site_intent.py pylint:unused-import
-tests/unit/modules/dnac/test_swim_intent.py pylint:unused-import
-tests/unit/modules/dnac/test_template_intent.py pylint:unused-import \ No newline at end of file
+plugins/modules/device_credential_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
diff --git a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.9.txt b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.9.txt
index 4cd6816af..199ea4ac4 100644
--- a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.9.txt
+++ b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.9.txt
@@ -352,6 +352,7 @@ plugins/action/syslog_config_create.py compile-2.6!skip # Python 2.6 is not supp
plugins/action/syslog_config_update.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/action/transit_peer_network.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/action/transit_peer_network_info.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/template_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/site_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
@@ -706,17 +707,36 @@ plugins/action/syslog_config_create.py compile-2.7!skip # Python 2.7 is not supp
plugins/action/syslog_config_update.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/action/transit_peer_network.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/action/transit_peer_network_info.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/site_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/module_utils/dnac.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/site_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/template_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/module_utils/dnac.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/pnp_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/site_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/swim_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_intent.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/network_settings_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/device_credential_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/template_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
diff --git a/ansible_collections/cisco/dnac/tests/unit/modules/dnac/dnac_module.py b/ansible_collections/cisco/dnac/tests/unit/modules/dnac/dnac_module.py
index c05b5a6ee..2a2ee78ce 100644
--- a/ansible_collections/cisco/dnac/tests/unit/modules/dnac/dnac_module.py
+++ b/ansible_collections/cisco/dnac/tests/unit/modules/dnac/dnac_module.py
@@ -28,6 +28,7 @@ from ansible_collections.ansible.netcommon.tests.unit.modules.utils import (
from ansible_collections.ansible.netcommon.tests.unit.modules.utils import (
set_module_args as _set_module_args,
)
+from unittest.mock import patch
def set_module_args(args):
@@ -75,9 +76,107 @@ def load_fixture(module_name, name, device=""):
class TestDnacModule(ModuleTestCase):
+
+ def __init__(self, module):
+
+ """
+    Initialize an instance of the class.
+
+ Parameters:
+ - module (ModuleType): The Python module associated with this instance.
+
+ Attributes:
+ - module (ModuleType): The provided module.
+ - test_data (dict): The loaded playbook data from the module.
+ - playbook_config (dict): The playbook configuration.
+ - playbook_config_missing_param (dict): The playbook configuration with missing parameters.
+ """
+
+ self.module = module
+ self.test_data = self.loadPlaybookData(str(self.module.__name__))
+ self.playbook_config = self.test_data.get("playbook_config")
+ self.playbook_config_missing_param = self.test_data.get("playbook_config_missing_param")
+
+ def setUp(self):
+
+ """
+ Set up the test environment by mocking Cisco DNA Center SDK initialization and execution.
+ This method is automatically called before each test case to ensure a clean and controlled environment.
+ Mocks the initialization and execution of the Cisco DNA Center SDK to isolate testing from actual SDK operations.
+
+ Mocked attributes:
+ - mock_dnac_init: Mocks the initialization of the DNACSDK class.
+ - run_dnac_init: The started mock for DNACSDK initialization.
+ - mock_dnac_exec: Mocks the execution of DNACSDK methods.
+ - run_dnac_exec: The started mock for DNACSDK method execution.
+ """
+
+ self.mock_dnac_init = patch(
+ "ansible_collections.cisco.dnac.plugins.module_utils.dnac.DNACSDK.__init__")
+ self.run_dnac_init = self.mock_dnac_init.start()
+ self.run_dnac_init.side_effect = [None]
+ self.mock_dnac_exec = patch(
+ "ansible_collections.cisco.dnac.plugins.module_utils.dnac.DNACSDK.exec"
+ )
+ self.run_dnac_exec = self.mock_dnac_exec.start()
+
+ def tearDown(self):
+
+ """
+ Clean up the test environment by stopping the mocked Cisco DNA Center SDK initialization and execution.
+ This method is automatically called after each test case to clean up any resources or mocks created during testing.
+ Stops the mock instances of the Cisco DNA Center SDK initialization and execution.
+ """
+
+ self.mock_dnac_exec.stop()
+ self.mock_dnac_init.stop()
+
+ def loadPlaybookData(self, module):
+
+ """
+ Load JSON data from a file.
+
+ Parameters:
+ - module (str): The name of the module used to construct the filename.
+
+ Returns:
+ - dict: The loaded JSON data.
+
+ Raises:
+ - FileNotFoundError: If the file does not exist.
+ - json.JSONDecodeError: If there is an error decoding the JSON data.
+ """
+
+ file_path = os.path.join(fixture_path, "{0}.json".format(module))
+ print(file_path)
+ try:
+ with open(file_path) as f:
+ data = f.read()
+ j_data = json.loads(data)
+ except Exception as e:
+ print(e)
+ pass
+
+ return j_data
+
def execute_module_devices(
self, failed=False, changed=False, response=None, sort=True, defaults=False
):
+
+ """
+    This method executes a module across devices.
+
+ Parameters:
+ - failed (bool, optional): If True, check for failures. Defaults to False.
+ - changed (bool, optional): If True, check for changes. Defaults to False.
+ - response (list, optional): The expected response data. Defaults to None.
+ - sort (bool, optional): If True, sort the response data before comparison. Defaults to True.
+    - defaults (bool, optional): If True, use default fixture values. Defaults to False.
+
+ Returns:
+ - dict: A dictionary containing the execution result.
+ """
+
module_name = self.module.__name__.rsplit(".", 1)[1]
local_fixture_path = os.path.join(fixture_path, module_name)
@@ -101,6 +200,22 @@ class TestDnacModule(ModuleTestCase):
self, failed=False, changed=False, response=None, sort=True, device=""
):
+ """
+ Execute a module for a specific device and perform validation.
+
+ This method executes the module for a specific device, performs validation checks, and returns the result.
+
+ Parameters:
+ - failed (bool, optional): If True, check for failures. Defaults to False.
+ - changed (bool, optional): If True, check for changes. Defaults to False.
+ - response (list, optional): The expected response data. Defaults to None.
+ - sort (bool, optional): If True, sort the response data before comparison. Defaults to True.
+ - device (str, optional): The device to execute the module on. Defaults to an empty string.
+
+ Returns:
+ - dict: A dictionary containing the execution result, including 'failed', 'changed', and 'response' keys.
+ """
+
self.load_fixtures(response, device=device)
if failed:
@@ -121,6 +236,14 @@ class TestDnacModule(ModuleTestCase):
return result
def failed(self):
+
+ """
+ Check for failures during module execution.
+
+ Returns:
+ - dict: A dictionary containing the failure status and additional information.
+ """
+
with self.assertRaises(AnsibleFailJson) as exc:
self.module.main()
@@ -129,12 +252,20 @@ class TestDnacModule(ModuleTestCase):
return result
def changed(self, changed=False):
+
+ """
+ Check for changes during module execution.
+
+ Parameters:
+ - changed (bool, optional): If True, check for changes. Defaults to False.
+
+ Returns:
+ - dict: A dictionary containing the change status and additional information.
+ """
+
with self.assertRaises(AnsibleExitJson) as exc:
self.module.main()
result = exc.exception.args[0]
self.assertEqual(result["changed"], changed, result)
return result
-
- def load_fixtures(self, response=None, device=""):
- pass
diff --git a/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_discovery_intent.py b/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_discovery_intent.py
new file mode 100644
index 000000000..82f9999de
--- /dev/null
+++ b/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_discovery_intent.py
@@ -0,0 +1,170 @@
+# Copyright (c) 2020 Cisco and/or its affiliates.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Make coding more python3-ish
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+from ansible_collections.cisco.dnac.plugins.modules import discovery_intent
+from .dnac_module import TestDnacModule, set_module_args
+
+
+class TestDnacDiscoveryIntent(TestDnacModule):
+ def __init__(self):
+
+ """
+ Inheriting from the base class of dnac_module
+ """
+
+ module = discovery_intent
+ super().__init__(module)
+
+ def load_fixtures(self, response=None, device=""):
+
+ """
+ Load fixtures for a specific device.
+
+ Parameters:
+ response (list, optional): The expected response data. Defaults to None.
+ device (str, optional): The device for which to load fixtures. Defaults to an empty string.
+ """
+
+ if "create_discovery" in self._testMethodName:
+ self.run_dnac_exec.side_effect = [
+ Exception(),
+ self.test_data.get("create_discovery_response"),
+ self.test_data.get("get_business_api_execution_details_response"),
+ self.test_data.get("get_discovery_response")
+ ]
+ elif "delete_existing_discovery" in self._testMethodName:
+ self.run_dnac_exec.side_effect = [
+ self.test_data.get("delete_get_discovery_response"),
+ self.test_data.get("delete_delete_discovery_response"),
+ self.test_data.get("get_business_api_execution_details_response")
+ ]
+ elif "delete_non_existing_discovery" in self._testMethodName:
+ self.run_dnac_exec.side_effect = [
+ Exception()
+ ]
+ elif "error_delete" in self._testMethodName:
+ self.run_dnac_exec.side_effect = [
+ self.test_data.get("delete_error_get_discovery_response"),
+ self.test_data.get("delete_delete_discovery_response"),
+ self.test_data.get("delete_execution_details_error")
+ ]
+ elif "error_create" in self._testMethodName:
+ self.run_dnac_exec.side_effect = [
+ Exception(),
+ self.test_data.get("create_discovery_response"),
+ self.test_data.get("delete_execution_details_error")
+ ]
+
+ def test_discovery_intent_create_discovery(self):
+ set_module_args(
+ dict(
+ dnac_host="172.23.241.186",
+ dnac_username="admin",
+ dnac_password="Maglev123",
+ dnac_verify=False,
+ dnac_log=True,
+ state="merged",
+ headers=None,
+ name=self.playbook_config.get('name'),
+ devices_list=self.playbook_config.get('devices_list'),
+ discoveryType="MULTI RANGE",
+ protocolOrder="ssh",
+ startIndex=1,
+ recordsToReturn=25
+ )
+ )
+ result = self.execute_module(changed=True, failed=False)
+ self.assertEqual(
+ result.get('msg'),
+ "Discovery Created Successfully"
+ )
+
+ def test_discovery_intent_delete_existing_discovery(self):
+ set_module_args(
+ dict(
+ dnac_host="172.23.241.186",
+ dnac_username="admin",
+ dnac_password="Maglev123",
+ dnac_verify=False,
+ dnac_log=True,
+ state="deleted",
+ headers=None,
+ name=self.playbook_config.get('name'),
+ devices_list=self.playbook_config.get('devices_list'),
+ discoveryType="MULTI RANGE",
+ protocolOrder="ssh",
+ startIndex=1,
+ recordsToReturn=25
+ )
+ )
+ result = self.execute_module(changed=True, failed=False)
+ self.assertEqual(
+ result.get('msg'),
+ "Discovery Deleted Successfully"
+ )
+
+ def test_discovery_intent_delete_non_existing_discovery(self):
+ set_module_args(
+ dict(
+ dnac_host="172.23.241.186",
+ dnac_username="admin",
+ dnac_password="Maglev123",
+ dnac_verify=False,
+ dnac_log=True,
+ state="deleted",
+ headers=None,
+ name=self.playbook_config.get('delete_non_exist_discovery_name'),
+ devices_list=self.playbook_config.get('devices_list'),
+ discoveryType="MULTI RANGE",
+ protocolOrder="ssh",
+ startIndex=1,
+ recordsToReturn=25
+ )
+ )
+ result = self.execute_module(changed=False, failed=False)
+ self.assertIsNone(result.get('exist_discovery'))
+ self.assertEqual(
+ result.get('msg'),
+ f"Discovery {self.playbook_config.get('delete_non_exist_discovery_name')} Not Found"
+ )
+
+ def test_discovery_intent_invalid_state(self):
+
+ set_module_args(
+ dict(
+ dnac_host="172.23.241.186",
+ dnac_username="admin",
+ dnac_password="Maglev123",
+ dnac_verify=False,
+ dnac_log=True,
+ state="present",
+ headers=None,
+ name=self.playbook_config.get('name'),
+ devices_list=self.playbook_config.get('devices_list'),
+ discoveryType="MULTI RANGE",
+ protocolOrder="ssh",
+ startIndex=1,
+ recordsToReturn=25
+ )
+ )
+ result = self.execute_module(changed=False, failed=True)
+ self.assertEqual(
+ result.get('msg'),
+ "value of state must be one of: merged, deleted, got: present"
+ )
diff --git a/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_pnp_intent.py b/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_pnp_intent.py
index b3c15a4ff..6f93e2a8a 100644
--- a/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_pnp_intent.py
+++ b/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_pnp_intent.py
@@ -17,39 +17,31 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
-from unittest.mock import patch
from ansible.errors import AnsibleActionFail
from ansible_collections.cisco.dnac.plugins.modules import pnp_intent
-from .dnac_module import TestDnacModule, set_module_args, loadPlaybookData
+from .dnac_module import TestDnacModule, set_module_args
class TestDnacPnPIntent(TestDnacModule):
+ def __init__(self):
- module = pnp_intent
+ """
+ Inheriting from the base class of dnac_module
+ """
- test_data = loadPlaybookData("pnp_intent")
+ module = pnp_intent
+ super().__init__(module)
- playbook_config = test_data.get("playbook_config")
- playbook_config_missing_param = test_data.get("playbook_config_missing_param")
-
- def setUp(self):
- super(TestDnacPnPIntent, self).setUp()
+ def load_fixtures(self, response=None, device=""):
- self.mock_dnac_init = patch(
- "ansible_collections.cisco.dnac.plugins.module_utils.dnac.DNACSDK.__init__")
- self.run_dnac_init = self.mock_dnac_init.start()
- self.run_dnac_init.side_effect = [None]
- self.mock_dnac_exec = patch(
- "ansible_collections.cisco.dnac.plugins.module_utils.dnac.DNACSDK.exec"
- )
- self.run_dnac_exec = self.mock_dnac_exec.start()
+ """
+ Load fixtures for a specific device.
- def tearDown(self):
- super(TestDnacPnPIntent, self).tearDown()
- self.mock_dnac_exec.stop()
- self.mock_dnac_init.stop()
+ Parameters:
+ response (list, optional): The expected response data. Defaults to None.
+ device (str, optional): The device for which to load fixtures. Defaults to an empty string.
+ """
- def load_fixtures(self, response=None, device=""):
if "site_not_found" in self._testMethodName:
self.run_dnac_exec.side_effect = [
self.test_data.get("image_exists_response"),
@@ -111,6 +103,13 @@ class TestDnacPnPIntent(TestDnacModule):
]
def test_pnp_intent_site_not_found(self):
+
+ """
+ Test case for PnP intent when site is not found.
+
+ This test case checks the behavior of the PnP intent when the site is not found in the specified DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -128,6 +127,13 @@ class TestDnacPnPIntent(TestDnacModule):
)
def test_pnp_intent_add_new_device(self):
+
+ """
+ Test case for PnP intent when adding a new device.
+
+ This test case checks the behavior of the PnP intent when adding a new device in the specified DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -145,6 +151,13 @@ class TestDnacPnPIntent(TestDnacModule):
)
def test_pnp_intent_device_exists(self):
+
+ """
+ Test case for PnP intent when a device already exists.
+
+ This test case checks the behavior of the PnP intent when a device already exists in the specified DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -162,6 +175,13 @@ class TestDnacPnPIntent(TestDnacModule):
)
def test_pnp_intent_image_doesnot_exist(self):
+
+ """
+ Test case for PnP intent when an image does not exist.
+
+ This test case checks the behavior of the PnP intent when the specified image is not found in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -179,6 +199,13 @@ class TestDnacPnPIntent(TestDnacModule):
)
def test_pnp_intent_template_doesnot_exist(self):
+
+ """
+ Test case for PnP intent when a template does not exist.
+
+ This test case checks the behavior of the PnP intent when the specified template is not found in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -196,6 +223,13 @@ class TestDnacPnPIntent(TestDnacModule):
)
def test_pnp_intent_project_not_found(self):
+
+ """
+ Test case for PnP intent when a project is not found.
+
+ This test case checks the behavior of the PnP intent when the specified project is not found in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -213,6 +247,13 @@ class TestDnacPnPIntent(TestDnacModule):
)
def test_pnp_intent_missing_param(self):
+
+ """
+ Test case for PnP intent with missing parameters in the playbook.
+
+ This test case checks the behavior of the PnP intent when the playbook contains missing required parameters.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -230,6 +271,13 @@ class TestDnacPnPIntent(TestDnacModule):
)
def test_pnp_intent_delete_device(self):
+
+ """
+ Test case for PnP intent when deleting a device.
+
+ This test case checks the behavior of the PnP intent when deleting a device in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -247,6 +295,13 @@ class TestDnacPnPIntent(TestDnacModule):
)
def test_pnp_intent_deletion_error(self):
+
+ """
+ Test case for PnP intent when device deletion fails.
+
+ This test case checks the behavior of the PnP intent when device deletion fails in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -264,6 +319,13 @@ class TestDnacPnPIntent(TestDnacModule):
)
def test_pnp_intent_delete_nonexisting_device(self):
+
+ """
+ Test case for PnP intent when deleting a non-existing device.
+
+ This test case checks the behavior of the PnP intent when trying to delete a device that doesn't exist in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -281,6 +343,13 @@ class TestDnacPnPIntent(TestDnacModule):
)
def test_pnp_intent_invalid_state(self):
+
+ """
+ Test case for PnP intent with an invalid state parameter.
+
+ This test case checks the behavior of the PnP intent when an invalid 'state' parameter is provided in the playbook.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
diff --git a/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_site_intent.py b/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_site_intent.py
index a12f1d1c4..089d4ee85 100644
--- a/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_site_intent.py
+++ b/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_site_intent.py
@@ -17,39 +17,29 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
-from unittest.mock import patch
-
from ansible_collections.cisco.dnac.plugins.modules import site_intent
-from .dnac_module import TestDnacModule, set_module_args, loadPlaybookData
+from .dnac_module import TestDnacModule, set_module_args
class TestDnacSiteIntent(TestDnacModule):
+ def __init__(self):
+ """
+ Inheriting from the base class of dnac_module
+ """
- module = site_intent
-
- test_data = loadPlaybookData("site_intent")
+ module = site_intent
+ super().__init__(module)
- playbook_config = test_data.get("playbook_config")
- playbook_config_missing_param = test_data.get("playbook_config_missing_param")
-
- def setUp(self):
- super(TestDnacSiteIntent, self).setUp()
+ def load_fixtures(self, response=None, device=""):
- self.mock_dnac_init = patch(
- "ansible_collections.cisco.dnac.plugins.module_utils.dnac.DNACSDK.__init__")
- self.run_dnac_init = self.mock_dnac_init.start()
- self.run_dnac_init.side_effect = [None]
- self.mock_dnac_exec = patch(
- "ansible_collections.cisco.dnac.plugins.module_utils.dnac.DNACSDK.exec"
- )
- self.run_dnac_exec = self.mock_dnac_exec.start()
+ """
+ Load fixtures for a specific device.
- def tearDown(self):
- super(TestDnacSiteIntent, self).tearDown()
- self.mock_dnac_exec.stop()
- self.mock_dnac_init.stop()
+ Parameters:
+ response (list, optional): The expected response data. Defaults to None.
+ device (str, optional): The device for which to load fixtures. Defaults to an empty string.
+ """
- def load_fixtures(self, response=None, device=""):
if "create_site" in self._testMethodName:
self.run_dnac_exec.side_effect = [
Exception(),
@@ -93,6 +83,13 @@ class TestDnacSiteIntent(TestDnacModule):
]
def test_site_intent_create_site(self):
+
+ """
+ Test case for site intent when creating a site.
+
+ This test case checks the behavior of the site intent when creating a new site in the specified DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -110,6 +107,13 @@ class TestDnacSiteIntent(TestDnacModule):
)
def test_site_intent_update_not_needed(self):
+
+ """
+ Test case for site intent when no update is needed.
+
+ This test case checks the behavior of the site intent when an update is not required for the specified site in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -127,6 +131,13 @@ class TestDnacSiteIntent(TestDnacModule):
)
def test_site_intent_update_needed(self):
+
+ """
+ Test case for site intent when an update is needed.
+
+ This test case checks the behavior of the site intent when an update is required for the specified site in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -144,6 +155,13 @@ class TestDnacSiteIntent(TestDnacModule):
)
def test_site_intent_delete_existing_site(self):
+
+ """
+ Test case for site intent when deleting an existing site.
+
+ This test case checks the behavior of the site intent when deleting an existing site in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -161,6 +179,13 @@ class TestDnacSiteIntent(TestDnacModule):
)
def test_site_intent_delete_non_existing_site(self):
+
+ """
+ Test case for site intent when attempting to delete a non-existing site.
+
+ This test case checks the behavior of the site intent when trying to delete a site that does not exist in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -178,6 +203,13 @@ class TestDnacSiteIntent(TestDnacModule):
)
def test_site_intent_invalid_param(self):
+
+ """
+ Test case for site intent with invalid parameters in the playbook.
+
+ This test case checks the behavior of the site intent when the playbook contains invalid parameters.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -194,6 +226,13 @@ class TestDnacSiteIntent(TestDnacModule):
)
def test_site_intent_error_delete(self):
+
+ """
+ Test case for site intent when an error occurs during site deletion.
+
+ This test case checks the behavior of the site intent when an error occurs while deleting a site in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -211,6 +250,13 @@ class TestDnacSiteIntent(TestDnacModule):
)
def test_site_intent_error_create(self):
+
+ """
+ Test case for site intent when an error occurs during site creation.
+
+ This test case checks the behavior of the site intent when an error occurs while creating a site in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -229,6 +275,12 @@ class TestDnacSiteIntent(TestDnacModule):
def test_site_intent_invalid_state(self):
+ """
+ Test case for site intent with an invalid 'state' parameter.
+
+ This test case checks the behavior of the site intent when an invalid 'state' parameter is provided in the playbook.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
diff --git a/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_swim_intent.py b/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_swim_intent.py
index 18acf4762..d2915e621 100644
--- a/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_swim_intent.py
+++ b/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_swim_intent.py
@@ -17,37 +17,29 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
-from unittest.mock import patch
-
from ansible_collections.cisco.dnac.plugins.modules import swim_intent
-from .dnac_module import TestDnacModule, set_module_args, loadPlaybookData
+from .dnac_module import TestDnacModule, set_module_args
class TestDnacSwimIntent(TestDnacModule):
+ def __init__(self):
+ """
+ Inheriting from the base class of dnac_module
+ """
- module = swim_intent
- test_data = loadPlaybookData("swim_intent")
- playbook_config = test_data.get("playbook_config")
- playbook_config_untag_image = test_data.get("playbook_config_untag_golden_image")
+ module = swim_intent
+ super().__init__(module)
- def setUp(self):
- super(TestDnacSwimIntent, self).setUp()
+ def load_fixtures(self, response=None, device=""):
- self.mock_dnac_init = patch(
- "ansible_collections.cisco.dnac.plugins.module_utils.dnac.DNACSDK.__init__")
- self.run_dnac_init = self.mock_dnac_init.start()
- self.run_dnac_init.side_effect = [None]
- self.mock_dnac_exec = patch(
- "ansible_collections.cisco.dnac.plugins.module_utils.dnac.DNACSDK.exec"
- )
- self.run_dnac_exec = self.mock_dnac_exec.start()
+ """
+ Load fixtures for a specific device.
- def tearDown(self):
- super(TestDnacSwimIntent, self).tearDown()
- self.mock_dnac_exec.stop()
- self.mock_dnac_init.stop()
+ Parameters:
+ response (list, optional): The expected response data. Defaults to None.
+ device (str, optional): The device for which to load fixtures. Defaults to an empty string.
+ """
- def load_fixtures(self, response=None, device=""):
if "full_flow" in self._testMethodName:
self.run_dnac_exec.side_effect = [
self.test_data.get("task_info_response"),
@@ -117,6 +109,13 @@ class TestDnacSwimIntent(TestDnacModule):
]
def test_swim_full_flow(self):
+
+ """
+ Test case for a full Software Image Management (SWIM) flow.
+
+ This test case covers the full SWIM flow, including image activation, import, tagging, distribution, and various error scenarios.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -133,6 +132,13 @@ class TestDnacSwimIntent(TestDnacModule):
)
def test_swim_image_import(self):
+
+ """
+ Test case for SWIM image import when the image already exists.
+
+ This test case checks the behavior of SWIM when importing an image that already exists in the specified DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -149,6 +155,13 @@ class TestDnacSwimIntent(TestDnacModule):
)
def test_swim_image_local_import(self):
+
+ """
+ Test case for SWIM local image import when the image already exists.
+
+ This test case checks the behavior of SWIM when importing a local image that already exists in the specified DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -165,6 +178,13 @@ class TestDnacSwimIntent(TestDnacModule):
)
def test_swim_untag_image(self):
+
+ """
+ Test case for SWIM untagging an image as Golden.
+
+ This test case checks the behavior of SWIM when untagging an image as a Golden image in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -181,6 +201,13 @@ class TestDnacSwimIntent(TestDnacModule):
)
def test_swim_missing_param_tag_golden_image(self):
+
+ """
+ Test case for SWIM with missing parameters for tagging a Golden image.
+
+ This test case checks the behavior of SWIM when attempting to tag an image as Golden with missing parameters.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -197,6 +224,13 @@ class TestDnacSwimIntent(TestDnacModule):
)
def test_swim_incorrect_site_untag_golden_image(self):
+
+ """
+ Test case for SWIM when trying to untag an image from a non-existing site.
+
+ This test case checks the behavior of SWIM when attempting to untag an image from a non-existing site.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -213,6 +247,13 @@ class TestDnacSwimIntent(TestDnacModule):
)
def test_swim_image_doesnot_exist_response(self):
+
+ """
+ Test case for SWIM when the image does not exist in the response.
+
+ This test case checks the behavior of SWIM when the requested image is not found in the DNAC response.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -229,6 +270,13 @@ class TestDnacSwimIntent(TestDnacModule):
)
def test_swim_only_image_distribution(self):
+
+ """
+ Test case for SWIM with only image distribution.
+
+ This test case checks the behavior of SWIM when distributing an image to devices.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -245,6 +293,13 @@ class TestDnacSwimIntent(TestDnacModule):
)
def test_swim_image_distribution_missing_param(self):
+
+ """
+ Test case for SWIM image distribution with missing parameters.
+
+ This test case checks the behavior of SWIM when attempting to distribute an image with missing parameters.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -261,6 +316,13 @@ class TestDnacSwimIntent(TestDnacModule):
)
def test_swim_only_image_activation(self):
+
+ """
+ Test case for SWIM with only image activation.
+
+ This test case checks the behavior of SWIM when activating an image.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -277,6 +339,13 @@ class TestDnacSwimIntent(TestDnacModule):
)
def test_swim_image_activation_missing_param(self):
+
+ """
+ Test case for SWIM image activation with missing parameters.
+
+ This test case checks the behavior of SWIM when attempting to activate an image with missing parameters.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -293,6 +362,13 @@ class TestDnacSwimIntent(TestDnacModule):
)
def test_swim_tag_golden_incorrect_family_name(self):
+
+ """
+ Test case for SWIM when tagging an image as Golden with an incorrect family name.
+
+ This test case checks the behavior of SWIM when attempting to tag an image as Golden with an incorrect family device name.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -309,6 +385,13 @@ class TestDnacSwimIntent(TestDnacModule):
)
def test_swim_device_doesnot_exist(self):
+
+ """
+ Test case for SWIM when the device does not exist.
+
+ This test case checks the behavior of SWIM when the specified device is not found in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -325,6 +408,13 @@ class TestDnacSwimIntent(TestDnacModule):
)
def test_swim_incorrect_image_import_parameter(self):
+
+ """
+ Test case for SWIM with incorrect image import parameters.
+
+ This test case checks the behavior of SWIM when using incorrect image import parameters.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
diff --git a/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_template_intent.py b/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_template_intent.py
index c0f866d46..512d31a82 100644
--- a/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_template_intent.py
+++ b/ansible_collections/cisco/dnac/tests/unit/modules/dnac/test_template_intent.py
@@ -17,37 +17,29 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
-from unittest.mock import patch
from ansible_collections.cisco.dnac.plugins.modules import template_intent
-from .dnac_module import TestDnacModule, set_module_args, loadPlaybookData
+from .dnac_module import TestDnacModule, set_module_args
class TestDnacTemplateIntent(TestDnacModule):
+ def __init__(self):
+ """
+ Inheriting from the base class of dnac_module
+ """
- module = template_intent
+ module = template_intent
+ super().__init__(module)
- test_data = loadPlaybookData("template_intent")
-
- playbook_config = test_data.get("playbook_config")
- playbook_config_missing_param = test_data.get("playbook_config_missing_param")
+ def load_fixtures(self, response=None, device=""):
- def setUp(self):
- super(TestDnacTemplateIntent, self).setUp()
- self.mock_dnac_init = patch(
- "ansible_collections.cisco.dnac.plugins.module_utils.dnac.DNACSDK.__init__")
- self.run_dnac_init = self.mock_dnac_init.start()
- self.run_dnac_init.side_effect = [None]
- self.mock_dnac_exec = patch(
- "ansible_collections.cisco.dnac.plugins.module_utils.dnac.DNACSDK.exec"
- )
- self.run_dnac_exec = self.mock_dnac_exec.start()
+ """
+ Load fixtures for a specific device.
- def tearDown(self):
- super(TestDnacTemplateIntent, self).tearDown()
- self.mock_dnac_exec.stop()
- self.mock_dnac_init.stop()
+ Parameters:
+ response (list, optional): The expected response data. Defaults to None.
+ device (str, optional): The device for which to load fixtures. Defaults to an empty string.
+ """
- def load_fixtures(self, response=None, device=""):
if "create_template" in self._testMethodName:
self.run_dnac_exec.side_effect = [
self.test_data.get("create_template_list_response"),
@@ -87,6 +79,13 @@ class TestDnacTemplateIntent(TestDnacModule):
]
def test_template_intent_create_template(self):
+
+ """
+ Test case for template intent when creating a template.
+
+ This test case checks the behavior of the template intent when creating a new template in the specified DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -104,6 +103,13 @@ class TestDnacTemplateIntent(TestDnacModule):
)
def test_template_intent_update_not_needed(self):
+
+ """
+ Test case for template intent when no update is needed.
+
+ This test case checks the behavior of the template intent when an update is not required for the specified template in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -121,6 +127,13 @@ class TestDnacTemplateIntent(TestDnacModule):
)
def test_template_intent_update_needed(self):
+
+ """
+ Test case for template intent when an update is needed.
+
+ This test case checks the behavior of the template intent when an update is required for the specified template in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -138,6 +151,13 @@ class TestDnacTemplateIntent(TestDnacModule):
)
def test_template_intent_project_not_found(self):
+
+ """
+ Test case for template intent when the project is not found.
+
+ This test case checks the behavior of the template intent when the specified project is not found in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -155,6 +175,13 @@ class TestDnacTemplateIntent(TestDnacModule):
)
def test_template_intent_delete_non_existing_template(self):
+
+ """
+ Test case for template intent when trying to delete a non-existing template.
+
+ This test case checks the behavior of the template intent when trying to delete a template that does not exist in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -172,6 +199,13 @@ class TestDnacTemplateIntent(TestDnacModule):
)
def test_template_intent_delete_template(self):
+
+ """
+ Test case for template intent when deleting a template.
+
+ This test case checks the behavior of the template intent when deleting an existing template in the DNAC.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -189,6 +223,13 @@ class TestDnacTemplateIntent(TestDnacModule):
)
def test_template_intent_missing_param(self):
+
+ """
+ Test case for template intent with missing parameters in the playbook.
+
+ This test case checks the behavior of the template intent when the playbook contains missing required parameters.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -206,6 +247,13 @@ class TestDnacTemplateIntent(TestDnacModule):
)
def test_template_intent_invalid_state(self):
+
+ """
+ Test case for template intent with an invalid 'state' parameter.
+
+ This test case checks the behavior of the template intent when an invalid 'state' parameter is provided in the playbook.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",
@@ -223,6 +271,13 @@ class TestDnacTemplateIntent(TestDnacModule):
)
def test_template_intent_invalid_param(self):
+
+ """
+ Test case for template intent with invalid parameters in the playbook.
+
+ This test case checks the behavior of the template intent when the playbook contains invalid parameters.
+ """
+
set_module_args(
dict(
dnac_host="1.1.1.1",