summaryrefslogtreecommitdiffstats
path: root/ansible_collections/splunk/es/tests
diff options
context:
space:
mode:
Diffstat (limited to 'ansible_collections/splunk/es/tests')
-rw-r--r--ansible_collections/splunk/es/tests/.keep0
-rw-r--r--ansible_collections/splunk/es/tests/integration/network-integration.cfg4
-rw-r--r--ansible_collections/splunk/es/tests/integration/target-prefixes.network1
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/aliases1
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/tasks/main.yml55
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/aliases1
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/tasks/main.yml74
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/aliases1
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/tasks/main.yml58
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/data_input_network/aliases1
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/data_input_network/tasks/main.yml58
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_event/aliases1
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_event/tasks/main.yml0
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml2
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/meta/main.yaml2
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml18
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml7
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml6
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml49
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_remove_dim_config.yaml6
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/deleted.yaml33
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/gathered.yaml22
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/merged.yaml82
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/replaced.yaml53
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/rtt.yaml102
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/vars/main.yaml101
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml38
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml6
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/deleted.yaml33
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/gathered.yaml23
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/merged.yaml70
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml72
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml118
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml2
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/meta/main.yaml2
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml18
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml7
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml6
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml22
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_remove_dim_config.yaml6
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml36
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/gathered.yaml25
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml57
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml43
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml73
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml46
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml2
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/meta/main.yaml2
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml18
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml7
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml6
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml43
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml16
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/deleted.yaml47
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/gathered.yaml38
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml77
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml72
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml131
-rw-r--r--ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/vars/main.yaml129
-rw-r--r--ansible_collections/splunk/es/tests/sanity/ignore-2.10.txt1
-rw-r--r--ansible_collections/splunk/es/tests/sanity/ignore-2.11.txt1
-rw-r--r--ansible_collections/splunk/es/tests/sanity/ignore-2.9.txt9
-rw-r--r--ansible_collections/splunk/es/tests/unit/__init__.py0
-rw-r--r--ansible_collections/splunk/es/tests/unit/compat/__init__.py0
-rw-r--r--ansible_collections/splunk/es/tests/unit/compat/builtins.py34
-rw-r--r--ansible_collections/splunk/es/tests/unit/compat/mock.py128
-rw-r--r--ansible_collections/splunk/es/tests/unit/compat/unittest.py39
-rw-r--r--ansible_collections/splunk/es/tests/unit/mock/__init__.py0
-rw-r--r--ansible_collections/splunk/es/tests/unit/mock/loader.py116
-rw-r--r--ansible_collections/splunk/es/tests/unit/mock/path.py12
-rw-r--r--ansible_collections/splunk/es/tests/unit/mock/procenv.py94
-rw-r--r--ansible_collections/splunk/es/tests/unit/mock/vault_helper.py42
-rw-r--r--ansible_collections/splunk/es/tests/unit/mock/yaml_helper.py167
-rw-r--r--ansible_collections/splunk/es/tests/unit/modules/__init__.py0
-rw-r--r--ansible_collections/splunk/es/tests/unit/modules/conftest.py40
-rw-r--r--ansible_collections/splunk/es/tests/unit/modules/utils.py51
-rw-r--r--ansible_collections/splunk/es/tests/unit/plugins/action/__init__.py0
-rw-r--r--ansible_collections/splunk/es/tests/unit/plugins/action/test_es_adaptive_response_notable_events.py443
-rw-r--r--ansible_collections/splunk/es/tests/unit/plugins/action/test_es_correlation_searches.py373
-rw-r--r--ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_monitors.py357
-rw-r--r--ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_network.py711
-rw-r--r--ansible_collections/splunk/es/tests/unit/plugins/modules/__init__.py0
-rw-r--r--ansible_collections/splunk/es/tests/unit/plugins/modules/conftest.py40
-rw-r--r--ansible_collections/splunk/es/tests/unit/plugins/modules/utils.py51
-rw-r--r--ansible_collections/splunk/es/tests/unit/requirements.txt42
85 files changed, 4780 insertions, 0 deletions
diff --git a/ansible_collections/splunk/es/tests/.keep b/ansible_collections/splunk/es/tests/.keep
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/.keep
diff --git a/ansible_collections/splunk/es/tests/integration/network-integration.cfg b/ansible_collections/splunk/es/tests/integration/network-integration.cfg
new file mode 100644
index 000000000..d12c1efe2
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/network-integration.cfg
@@ -0,0 +1,4 @@
+[persistent_connection]
+command_timeout = 100
+connect_timeout = 100
+connect_retry_timeout = 100
diff --git a/ansible_collections/splunk/es/tests/integration/target-prefixes.network b/ansible_collections/splunk/es/tests/integration/target-prefixes.network
new file mode 100644
index 000000000..2a852434e
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/target-prefixes.network
@@ -0,0 +1 @@
+splunk
diff --git a/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/aliases b/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/aliases
new file mode 100644
index 000000000..f4c7f6a2b
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/aliases
@@ -0,0 +1 @@
+network/splunk
diff --git a/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/tasks/main.yml b/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/tasks/main.yml
new file mode 100644
index 000000000..d111fea78
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/tasks/main.yml
@@ -0,0 +1,55 @@
+---
+- name: remove previous correlation_search
+ correlation_search:
+ name: "Test Fake Coorelation Search From Playbook"
+ description: "Test Fake Coorelation Search From Playbook, description edition."
+ search: 'source="/var/log/snort.log"'
+ state: "absent"
+
+- name: create correlation_search
+ correlation_search:
+ name: "Test Fake Coorelation Search From Playbook"
+ description: "Test Fake Coorelation Search From Playbook, description edition."
+ search: 'source="/var/log/snort.log"'
+ state: "present"
+
+- name: Test splunk.es.adaptive_response_notable_event
+ adaptive_response_notable_event:
+ name: "Fake notable event from playbook"
+ correlation_search_name: "Test Fake Coorelation Search From Playbook"
+ description: "Test Fake notable event from playbook, description edition."
+ state: "present"
+ next_steps:
+ - ping
+ - nslookup
+ recommended_actions:
+ - script
+ - ansiblesecurityautomation
+ default_status: unassigned
+ register: adaptive_response_notable_event_out
+
+- name: Assert Create splunk.es.adaptive_response_notable_event CHANGED
+ assert:
+ that:
+ - adaptive_response_notable_event_out['changed'] == True
+ - adaptive_response_notable_event_out['failed'] == False
+
+- name: Validate splunk.es.adaptive_response_notable_event idempotent
+ adaptive_response_notable_event:
+ name: "Fake notable event from playbook"
+ correlation_search_name: "Test Fake Coorelation Search From Playbook"
+ description: "Test Fake notable event from playbook, description edition."
+ state: "present"
+ next_steps:
+ - ping
+ - nslookup
+ recommended_actions:
+ - script
+ - ansiblesecurityautomation
+ register: adaptive_response_notable_event_out2
+
+- name: Assert Create splunk.es.adaptive_response_notable_event IDEMPOTENT
+ assert:
+ that:
+ - adaptive_response_notable_event_out2['changed'] == False
+ - adaptive_response_notable_event_out2['failed'] == False
diff --git a/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/aliases b/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/aliases
new file mode 100644
index 000000000..f4c7f6a2b
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/aliases
@@ -0,0 +1 @@
+network/splunk
diff --git a/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/tasks/main.yml b/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/tasks/main.yml
new file mode 100644
index 000000000..a2ae59ef4
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/tasks/main.yml
@@ -0,0 +1,74 @@
+---
+- name: Cleanup old correlation_search
+ correlation_search:
+ name: "Test Fake Coorelation Search From Playbook"
+ description: "Test Fake Coorelation Search From Playbook, description edition."
+ search: 'source="/var/log/snort.log"'
+ state: "absent"
+
+- name: Test correlation_search - CREATE
+ correlation_search:
+ name: "Test Fake Coorelation Search From Playbook"
+ description: "Test Fake Coorelation Search From Playbook, description edition."
+ search: 'source="/var/log/snort.log"'
+ state: "present"
+ register: correlation_search_create_output
+
+- name: Assert Create splunk.es.correlation_search CHANGED
+ assert:
+ that:
+ - correlation_search_create_output['changed'] == True
+ - correlation_search_create_output['failed'] == False
+
+- name: Test correlation_search - CREATE IDEMPOTENT
+ correlation_search:
+ name: "Test Fake Coorelation Search From Playbook"
+ description: "Test Fake Coorelation Search From Playbook, description edition."
+ search: 'source="/var/log/snort.log"'
+ state: "present"
+ register: correlation_search_create_output2
+
+- name: Assert Create splunk.es.correlation_search IDEMPOTENT
+ assert:
+ that:
+ - correlation_search_create_output2['changed'] == False
+ - correlation_search_create_output2['failed'] == False
+
+- name: Test correlation_search_info
+ correlation_search_info:
+ name: "Test Fake Coorelation Search From Playbook"
+ register: correlation_search_info_output
+
+- name: Assert Create splunk.es.correlation_search CHANGED
+ assert:
+ that:
+ - correlation_search_info_output['changed'] == False
+ - correlation_search_info_output['failed'] == False
+
+- name: Test correlation_search - DELETE
+ correlation_search:
+ name: "Test Fake Coorelation Search From Playbook"
+ description: "Test Fake Coorelation Search From Playbook, description edition."
+ search: 'source="/var/log/snort.log"'
+ state: "absent"
+ register: correlation_search_delete_output
+
+- name: Assert Create splunk.es.correlation_search CHANGED
+ assert:
+ that:
+ - correlation_search_delete_output['changed'] == True
+ - correlation_search_delete_output['failed'] == False
+
+- name: Test correlation_search - DELETE IDEMPOTENT
+ correlation_search:
+ name: "Test Fake Coorelation Search From Playbook"
+ description: "Test Fake Coorelation Search From Playbook, description edition."
+ search: 'source="/var/log/snort.log"'
+ state: "absent"
+ register: correlation_search_delete_output2
+
+- name: Assert Create splunk.es.correlation_search IDEMPOTENT
+ assert:
+ that:
+ - correlation_search_delete_output2['changed'] == False
+ - correlation_search_delete_output2['failed'] == False
diff --git a/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/aliases b/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/aliases
new file mode 100644
index 000000000..f4c7f6a2b
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/aliases
@@ -0,0 +1 @@
+network/splunk
diff --git a/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/tasks/main.yml b/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/tasks/main.yml
new file mode 100644
index 000000000..87459760e
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/tasks/main.yml
@@ -0,0 +1,58 @@
+---
+- name: Clean up previous data_input_monitor
+ data_input_monitor:
+ name: "/var/log/messages"
+ state: "absent"
+ recursive: True
+
+- name: Test data_input_monitor - CREATE
+ data_input_monitor:
+ name: "/var/log/messages"
+ state: "present"
+ recursive: True
+ register: data_input_monitor_output
+
+- name: Assert Create splunk.es.data_input_monitor CHANGED
+ assert:
+ that:
+ - data_input_monitor_output['changed'] == True
+ - data_input_monitor_output['failed'] == False
+
+- name: Test data_input_monitor - CREATE IDEMPOTENT
+ data_input_monitor:
+ name: "/var/log/messages"
+ state: "present"
+ recursive: True
+ register: data_input_monitor_output2
+
+- name: Assert Create splunk.es.data_input_monitor CREATE IDEMPOTENT
+ assert:
+ that:
+ - data_input_monitor_output2['changed'] == False
+ - data_input_monitor_output2['failed'] == False
+
+- name: Test data_input_monitor - DELETE
+ data_input_monitor:
+ name: "/var/log/messages"
+ state: "absent"
+ recursive: True
+ register: data_input_monitor_absent_output
+
+- name: Assert Create splunk.es.data_input_monitor CHANGED
+ assert:
+ that:
+ - data_input_monitor_absent_output['changed'] == True
+ - data_input_monitor_absent_output['failed'] == False
+
+- name: Test data_input_monitor - DELETE IDEMPOTENT
+ data_input_monitor:
+ name: "/var/log/messages"
+ state: "absent"
+ recursive: True
+ register: data_input_monitor_absent_output2
+
+- name: Assert Create splunk.es.data_input_monitor DELETE IDEMPOTENT
+ assert:
+ that:
+ - data_input_monitor_absent_output2['changed'] == False
+ - data_input_monitor_absent_output2['failed'] == False
diff --git a/ansible_collections/splunk/es/tests/integration/targets/data_input_network/aliases b/ansible_collections/splunk/es/tests/integration/targets/data_input_network/aliases
new file mode 100644
index 000000000..f4c7f6a2b
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/data_input_network/aliases
@@ -0,0 +1 @@
+network/splunk
diff --git a/ansible_collections/splunk/es/tests/integration/targets/data_input_network/tasks/main.yml b/ansible_collections/splunk/es/tests/integration/targets/data_input_network/tasks/main.yml
new file mode 100644
index 000000000..5082458c0
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/data_input_network/tasks/main.yml
@@ -0,0 +1,58 @@
+---
+- name: Cleanup previous data_input_network
+ data_input_network:
+ name: "8099"
+ protocol: "tcp"
+ state: "absent"
+
+- name: Test data_input_network - CREATE
+ data_input_network:
+ name: "8099"
+ protocol: "tcp"
+ state: "present"
+ register: data_input_network_output
+
+- name: Assert Create splunk.es.data_input_network CHANGED
+ assert:
+ that:
+ - data_input_network_output is changed
+ - data_input_network_output is not failed
+
+- name: Test data_input_network - CREATE IDEMPOTENT
+ data_input_network:
+ name: "8099"
+ protocol: "tcp"
+ state: "present"
+ register: data_input_network_output2
+
+- name: Assert Create splunk.es.data_input_network CREATE IDEMPOTENT
+ assert:
+ that:
+ - data_input_network_output2 is not changed
+ - data_input_network_output2 is not failed
+
+- name: Test data_input_network - DELETE
+ data_input_network:
+ name: "8099"
+ protocol: "tcp"
+ state: "absent"
+ register: data_input_network_absent_output
+
+- name: Assert Create splunk.es.data_input_network CHANGED
+ assert:
+ that:
+ - data_input_network_absent_output is changed
+ - data_input_network_absent_output is not failed
+
+- name: Test data_input_network - DELETE IDEMPOTENT
+ data_input_network:
+ name: "8099"
+ protocol: "tcp"
+ state: "absent"
+ register: data_input_network_absent_output2
+
+- name: Assert Create splunk.es.data_input_network DELETE IDEMPOTENT
+ assert:
+ that:
+ - data_input_network_absent_output2 is not changed
+ - data_input_network_absent_output2 is not failed
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_event/aliases b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_event/aliases
new file mode 100644
index 000000000..f4c7f6a2b
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_event/aliases
@@ -0,0 +1 @@
+network/splunk
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_event/tasks/main.yml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_event/tasks/main.yml
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_event/tasks/main.yml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml
new file mode 100644
index 000000000..10c0fabcb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml
@@ -0,0 +1,2 @@
+---
+testcase: '*'
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/meta/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/meta/main.yaml
new file mode 100644
index 000000000..23d65c7ef
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/meta/main.yaml
@@ -0,0 +1,2 @@
+---
+dependencies: []
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml
new file mode 100644
index 000000000..dcc81f25f
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml
@@ -0,0 +1,18 @@
+---
+- name: collect all test cases
+ find:
+ paths: '{{ role_path }}/tests'
+ patterns: '{{ testcase }}.yaml'
+ register: test_cases
+
+- name: set test_items
+ set_fact: test_items="{{ test_cases.files | map(attribute='path') | list }}"
+
+- name: Run test case (connection=ansible.netcommon.httpapi)
+ include: '{{ test_case_to_run }}'
+ vars:
+ ansible_connection: ansible.netcommon.httpapi
+ with_items: '{{ test_items }}'
+ loop_control:
+ loop_var: test_case_to_run
+ tags: connection_httpapi
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml
new file mode 100644
index 000000000..62cc1ae1e
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml
@@ -0,0 +1,7 @@
+---
+- include: cli.yaml
+ tags:
+ - cli
+
+- include: redirection.yaml
+ when: ansible_version.full is version('2.10.0', '>=')
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml
new file mode 100644
index 000000000..bafc23a45
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml
@@ -0,0 +1,6 @@
+---
+- name: collect all test cases
+ find:
+ paths: '{{ role_path }}/tests/redirection'
+ patterns: '{{ testcase }}.yaml'
+ register: test_cases
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml
new file mode 100644
index 000000000..02e9074da
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml
@@ -0,0 +1,49 @@
+---
+- name: create test correlation search
+ splunk.es.splunk_correlation_searches:
+ config:
+ - name: Ansible Test
+ description: test description
+ search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6'
+ state: merged
+
+- name: populate notable event adaptive response for test correlation search
+ splunk.es.splunk_adaptive_response_notable_events:
+ config:
+ - correlation_search_name: Ansible Test
+ description: test notable event
+ drilldown_earliest_offset: $info_min_time$
+ drilldown_latest_offset: $info_max_time$
+ drilldown_name: test_drill_name
+ drilldown_search: test_drill
+ extract_artifacts:
+ asset:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ identity:
+ - src_user
+ - user
+ - src_user_id
+ - src_user_role
+ - user_id
+ - user_role
+ - vendor_account
+ investigation_profiles:
+ - test profile 1
+ - test profile 2
+ - test profile 3
+ next_steps:
+ - makestreams
+ - nbtstat
+ - nslookup
+ name: ansible_test_notable
+ recommended_actions:
+ - email
+ - logevent
+ - makestreams
+ - nbtstat
+ security_domain: threat
+ severity: high
+ state: merged \ No newline at end of file
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_remove_dim_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_remove_dim_config.yaml
new file mode 100644
index 000000000..ab4a4a278
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_remove_dim_config.yaml
@@ -0,0 +1,6 @@
+---
+- name: create test correlation search
+ splunk.es.splunk_correlation_searches:
+ config:
+ - name: Ansible Test
+ state: deleted
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/deleted.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/deleted.yaml
new file mode 100644
index 000000000..e2fa5c8cf
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/deleted.yaml
@@ -0,0 +1,33 @@
+---
+- debug:
+ msg: Start Deleted integration state for adaptive_response_notable_events ansible_connection={{ ansible_connection }}
+
+- include_tasks: _remove_dim_config.yaml
+
+- include_tasks: _populate_dim_config.yaml
+
+- block:
+ - name: Delete adaptive response notable events config
+ splunk.es.splunk_adaptive_response_notable_events: &id001
+ config:
+ - correlation_search_name: Ansible Test
+ state: deleted
+ register: result
+
+ - assert:
+ that:
+ - result.changed == true
+ - merged['after'] == result['adaptive_response_notable_events']['before']
+ - merged['before'] == result['adaptive_response_notable_events']['after']
+
+ - name: Delete attributes of all configured interfaces (IDEMPOTENT)
+ register: result
+ splunk.es.splunk_adaptive_response_notable_events: *id001
+
+ - name: Assert that the previous delete task was idempotent
+ assert:
+ that:
+ - result.changed == false
+
+ always:
+ - include_tasks: _remove_dim_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/gathered.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/gathered.yaml
new file mode 100644
index 000000000..f5003ee06
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/gathered.yaml
@@ -0,0 +1,22 @@
+---
+- debug:
+ msg: START adaptive_response_notable_events gathered integration tests on connection={{ ansible_connection }}
+
+- include_tasks: _remove_dim_config.yaml
+
+- include_tasks: _populate_dim_config.yaml
+
+- block:
+ - name: Gather adaptive response notable events config
+ splunk.es.splunk_adaptive_response_notable_events:
+ config:
+ - correlation_search_name: Ansible Test
+ state: gathered
+ register: result
+
+ - assert:
+ that:
+ - merged['after'] == result['adaptive_response_notable_events']['gathered']
+ - result['changed'] == false
+ always:
+ - include_tasks: _remove_dim_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/merged.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/merged.yaml
new file mode 100644
index 000000000..6c949f830
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/merged.yaml
@@ -0,0 +1,82 @@
+---
+- debug:
+ msg:
+ START Merged adaptive_response_notable_events state for integration tests on connection={{
+ ansible_connection }}
+
+- include_tasks: _remove_dim_config.yaml
+
+- block:
+ - name: create test correlation search
+ splunk.es.splunk_correlation_searches:
+ config:
+ - name: Ansible Test
+ description: test description
+ search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6'
+ state: merged
+
+ - name: Merge and create new adaptive response notable events configuration
+ tags: merged
+ register: result
+ splunk.es.splunk_adaptive_response_notable_events: &id001
+ state: merged
+ config:
+ - correlation_search_name: Ansible Test
+ description: test notable event
+ drilldown_earliest_offset: $info_min_time$
+ drilldown_latest_offset: $info_max_time$
+ drilldown_name: test_drill_name
+ drilldown_search: test_drill
+ extract_artifacts:
+ asset:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ identity:
+ - src_user
+ - user
+ - src_user_id
+ - src_user_role
+ - user_id
+ - user_role
+ - vendor_account
+ investigation_profiles:
+ - test profile 1
+ - test profile 2
+ - test profile 3
+ next_steps:
+ - makestreams
+ - nbtstat
+ - nslookup
+ name: ansible_test_notable
+ recommended_actions:
+ - email
+ - logevent
+ - makestreams
+ - nbtstat
+ security_domain: threat
+ severity: high
+
+ - name: Assert that task reports change and after dict is correctly generated
+ assert:
+ that:
+ - result['changed'] == true
+ - merged['after'] == result['adaptive_response_notable_events']['after']
+
+ - name: Assert that before dicts are correctly generated
+ assert:
+ that:
+ - merged['before'] == result['adaptive_response_notable_events']['before']
+
+ - name: Merge provided configuration with device configuration (IDEMPOTENT)
+ register: result
+ splunk.es.splunk_adaptive_response_notable_events: *id001
+
+ - name: Assert that the previous task was idempotent
+ assert:
+ that:
+ - result['changed'] == false
+
+ always:
+ - include_tasks: _remove_dim_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/replaced.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/replaced.yaml
new file mode 100644
index 000000000..47cf117b2
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/replaced.yaml
@@ -0,0 +1,53 @@
+---
+- debug:
+ msg: START Replaced adaptive_response_notable_events state for integration tests on connection={{ ansible_connection }}
+
+- include_tasks: _remove_dim_config.yaml
+- include_tasks: _populate_dim_config.yaml
+
+- block:
+ - name: Replace existing adaptive response notable events configuration
+ register: result
+ splunk.es.splunk_adaptive_response_notable_events: &id001
+ state: replaced
+ config:
+ - correlation_search_name: Ansible Test
+ description: test notable event
+ drilldown_earliest_offset: $info_min_time$
+ drilldown_latest_offset: $info_max_time$
+ extract_artifacts:
+ asset:
+ - src
+ - dest
+ identity:
+ - src_user
+ - user
+ - src_user_id
+ next_steps:
+ - makestreams
+ name: ansible_test_notable
+ recommended_actions:
+ - email
+ - logevent
+ security_domain: threat
+ severity: high
+
+ - assert:
+ that:
+ - result.changed == true
+ - replaced['before'] == result['adaptive_response_notable_events']['before']
+ - replaced['after'] == result['adaptive_response_notable_events']['after']
+
+ - name:
+ Replaces device configuration of listed adaptive response notable events configuration with
+ provided configuration (IDEMPOTENT)
+ register: result
+ splunk.es.splunk_adaptive_response_notable_events: *id001
+
+ - name: Assert that task was idempotent
+ assert:
+ that:
+ - result['changed'] == false
+
+ always:
+ - include_tasks: _remove_dim_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/rtt.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/rtt.yaml
new file mode 100644
index 000000000..dfb936ff0
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/rtt.yaml
@@ -0,0 +1,102 @@
+---
+- debug:
+ msg: START adaptive_response_notable_events round trip integration tests on connection={{ ansible_connection }}
+
+- include_tasks: _remove_dim_config.yaml
+- include_tasks: _populate_dim_config.yaml
+
+- block:
+ - name: Apply the provided configuration (base config)
+ register: base_config
+ splunk.es.splunk_adaptive_response_notable_events: &id001
+ state: merged
+ config:
+ - correlation_search_name: Ansible Test
+ description: test notable event
+ drilldown_earliest_offset: $info_min_time$
+ drilldown_latest_offset: $info_max_time$
+ drilldown_name: test_drill_name
+ drilldown_search: test_drill
+ extract_artifacts:
+ asset:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ identity:
+ - src_user
+ - user
+ - src_user_id
+ - src_user_role
+ - user_id
+ - user_role
+ - vendor_account
+ investigation_profiles:
+ - test profile 1
+ - test profile 2
+ - test profile 3
+ next_steps:
+ - makestreams
+ - nbtstat
+ - nslookup
+ name: ansible_test_notable
+ recommended_actions:
+ - email
+ - logevent
+ - makestreams
+ - nbtstat
+ security_domain: threat
+ severity: high
+
+ - name: Gather adaptive response notable events configuration facts
+ register: gather_result
+ splunk.es.splunk_adaptive_response_notable_events:
+ config:
+ - correlation_search_name: Ansible Test
+ state: gathered
+
+ - name: Apply the configuration which need to be reverted
+ register: result
+ splunk.es.splunk_adaptive_response_notable_events:
+ config:
+ - correlation_search_name: Ansible Test
+ description: test notable event
+ drilldown_earliest_offset: $info_min_time$
+ drilldown_latest_offset: $info_max_time$
+ extract_artifacts:
+ asset:
+ - src
+ - dest
+ identity:
+ - src_user
+ - user
+ - src_user_id
+ next_steps:
+ - makestreams
+ name: ansible_test_notable
+ recommended_actions:
+ - email
+ - logevent
+ security_domain: threat
+ severity: high
+ state: replaced
+
+ - assert:
+ that:
+ - result.changed == true
+ - replaced['before'] == result['adaptive_response_notable_events']['before']
+ - replaced['after'] == result['adaptive_response_notable_events']['after']
+
+ - name: Revert back to base config using facts round trip
+ register: revert
+ splunk.es.splunk_adaptive_response_notable_events:
+ config: "{{ gather_result['adaptive_response_notable_events']['gathered'] }}"
+ state: replaced
+
+ - assert:
+ that:
+ - revert['changed'] == true
+ - merged['after'] == revert['adaptive_response_notable_events']['after']
+
+ always:
+ - include_tasks: _remove_dim_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/vars/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/vars/main.yaml
new file mode 100644
index 000000000..8116add0d
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/vars/main.yaml
@@ -0,0 +1,101 @@
+---
+merged:
+ before: []
+
+ after:
+ - correlation_search_name: Ansible Test
+ description: test notable event
+ drilldown_earliest_offset: $info_min_time$
+ drilldown_latest_offset: $info_max_time$
+ drilldown_name: test_drill_name
+ drilldown_search: test_drill
+ extract_artifacts:
+ asset:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ identity:
+ - src_user
+ - user
+ - src_user_id
+ - src_user_role
+ - user_id
+ - user_role
+ - vendor_account
+ investigation_profiles:
+ - test profile 1
+ - test profile 2
+ - test profile 3
+ next_steps:
+ - makestreams
+ - nbtstat
+ - nslookup
+ name: ansible_test_notable
+ recommended_actions:
+ - email
+ - logevent
+ - makestreams
+ - nbtstat
+ security_domain: threat
+ severity: high
+
+replaced:
+ before:
+ - correlation_search_name: Ansible Test
+ description: test notable event
+ drilldown_earliest_offset: $info_min_time$
+ drilldown_latest_offset: $info_max_time$
+ drilldown_name: test_drill_name
+ drilldown_search: test_drill
+ extract_artifacts:
+ asset:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ identity:
+ - src_user
+ - user
+ - src_user_id
+ - src_user_role
+ - user_id
+ - user_role
+ - vendor_account
+ investigation_profiles:
+ - test profile 1
+ - test profile 2
+ - test profile 3
+ next_steps:
+ - makestreams
+ - nbtstat
+ - nslookup
+ name: ansible_test_notable
+ recommended_actions:
+ - email
+ - logevent
+ - makestreams
+ - nbtstat
+ security_domain: threat
+ severity: high
+ after:
+ - correlation_search_name: Ansible Test
+ description: test notable event
+ drilldown_earliest_offset: $info_min_time$
+ drilldown_latest_offset: $info_max_time$
+ extract_artifacts:
+ asset:
+ - src
+ - dest
+ identity:
+ - src_user
+ - user
+ - src_user_id
+ next_steps:
+ - makestreams
+ name: ansible_test_notable
+ recommended_actions:
+ - email
+ - logevent
+ security_domain: threat
+ severity: high \ No newline at end of file
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml
new file mode 100644
index 000000000..39b507ff3
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml
@@ -0,0 +1,38 @@
+---
+- name: merge correlation searches config
+ splunk.es.splunk_correlation_searches:
+ config:
+ - name: Ansible Test
+ disabled: false
+ description: test description
+ app: DA-ESS-EndpointProtection
+ annotations:
+ cis20:
+ - test1
+ mitre_attack:
+ - test2
+ kill_chain_phases:
+ - test3
+ nist:
+ - test4
+ custom:
+ - framework: test_framework
+ custom_annotations:
+ - test5
+ ui_dispatch_context: SplunkEnterpriseSecuritySuite
+ time_earliest: -24h
+ time_latest: now
+ cron_schedule: "*/5 * * * *"
+ scheduling: realtime
+ schedule_window: 0
+ schedule_priority: default
+ trigger_alert: once
+ trigger_alert_when: number of events
+ trigger_alert_when_condition: greater than
+ trigger_alert_when_value: 10
+ throttle_window_duration: 5s
+ throttle_fields_to_group_by:
+ - test_field1
+ suppress_alerts: False
+ search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6'
+ state: merged \ No newline at end of file
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml
new file mode 100644
index 000000000..7707f9191
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml
@@ -0,0 +1,6 @@
+---
+- name: delete correlation search
+ splunk.es.splunk_correlation_searches:
+ config:
+ - name: Ansible Test
+ state: deleted \ No newline at end of file
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/deleted.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/deleted.yaml
new file mode 100644
index 000000000..363f0f3b5
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/deleted.yaml
@@ -0,0 +1,33 @@
+---
+- debug:
+ msg: Start Deleted integration state for correlation_searches ansible_connection={{ ansible_connection }}
+
+- include_tasks: _remove_config.yaml
+
+- include_tasks: _populate_config.yaml
+
+- block:
+ - name: Delete correlation searches config
+ splunk.es.splunk_correlation_searches: &id001
+ config:
+ - name: Ansible Test
+ state: deleted
+ register: result
+
+ - assert:
+ that:
+ - result.changed == true
+ - merged['after'] == result['correlation_searches']['before']
+ - merged['before'] == result['correlation_searches']['after']
+
+ - name: Delete attributes of all configured correlation searches (IDEMPOTENT)
+ register: result
+ splunk.es.splunk_correlation_searches: *id001
+
+ - name: Assert that the previous delete task was idempotent
+ assert:
+ that:
+ - result.changed == false
+
+ always:
+ - include_tasks: _remove_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/gathered.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/gathered.yaml
new file mode 100644
index 000000000..f612ab397
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/gathered.yaml
@@ -0,0 +1,23 @@
+---
+- debug:
+ msg: START correlation_searches gathered integration tests on connection={{ ansible_connection }}
+
+- include_tasks: _remove_config.yaml
+
+- include_tasks: _populate_config.yaml
+
+- block:
+ - name: Gather correlation searches config
+ splunk.es.splunk_correlation_searches:
+ config:
+ - name: Ansible Test
+ - name: Ansible Test1
+ state: gathered
+ register: result
+
+ - assert:
+ that:
+ - merged['after'] == result['gathered']
+ - result['changed'] == false
+ always:
+ - include_tasks: _remove_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/merged.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/merged.yaml
new file mode 100644
index 000000000..a83d1aacf
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/merged.yaml
@@ -0,0 +1,70 @@
+---
+- debug:
+ msg: START Merged correlation_searches state for integration tests on connection={{
+ ansible_connection }}
+
+- include_tasks: _remove_config.yaml
+
+- block:
+ - name: Merge and create new correlation searches configuration
+ tags: merged
+ register: result
+ splunk.es.splunk_correlation_searches: &id001
+ state: merged
+ config:
+ - name: Ansible Test
+ disabled: false
+ description: test description
+ app: DA-ESS-EndpointProtection
+ annotations:
+ cis20:
+ - test1
+ mitre_attack:
+ - test2
+ kill_chain_phases:
+ - test3
+ nist:
+ - test4
+ custom:
+ - framework: test_framework
+ custom_annotations:
+ - test5
+ ui_dispatch_context: SplunkEnterpriseSecuritySuite
+ time_earliest: -24h
+ time_latest: now
+ cron_schedule: "*/5 * * * *"
+ scheduling: realtime
+ schedule_window: 0
+ schedule_priority: default
+ trigger_alert: once
+ trigger_alert_when: number of events
+ trigger_alert_when_condition: greater than
+ trigger_alert_when_value: 10
+ throttle_window_duration: 5s
+ throttle_fields_to_group_by:
+ - test_field1
+ suppress_alerts: False
+ search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6'
+
+ - name: Assert that task reports change and after dict is correctly generated
+ assert:
+ that:
+ - result['changed'] == true
+ - merged['after'] == result['correlation_searches']['after']
+
+ - name: Assert that before dicts are correctly generated
+ assert:
+ that:
+ - merged['before'] == result['correlation_searches']['before']
+
+ - name: Merge provided configuration with device configuration (IDEMPOTENT)
+ register: result
+ splunk.es.splunk_correlation_searches: *id001
+
+ - name: Assert that the previous task was idempotent
+ assert:
+ that:
+ - result['changed'] == false
+
+ always:
+ - include_tasks: _remove_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml
new file mode 100644
index 000000000..a41649a5b
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml
@@ -0,0 +1,72 @@
+---
+- debug:
+ msg: START Replaced correlation_searches state for integration tests on connection={{ ansible_connection }}
+
+- include_tasks: _remove_config.yaml
+- include_tasks: _populate_config.yaml
+
+- block:
+
+ - name: Replace existing correlation searches configuration
+ register: result
+ splunk.es.splunk_correlation_searches: &id001
+ state: replaced
+ config:
+ - name: Ansible Test
+ disabled: false
+ description: test description
+ app: SplunkEnterpriseSecuritySuite
+ annotations:
+ cis20:
+ - test1
+ - test2
+ mitre_attack:
+ - test3
+ - test4
+ kill_chain_phases:
+ - test5
+ - test6
+ nist:
+ - test7
+ - test8
+ custom:
+ - framework: test_framework2
+ custom_annotations:
+ - test9
+ - test10
+ ui_dispatch_context: SplunkEnterpriseSecuritySuite
+ time_earliest: -24h
+ time_latest: now
+ cron_schedule: "*/5 * * * *"
+ scheduling: continuous
+ schedule_window: auto
+ schedule_priority: default
+ trigger_alert: once
+ trigger_alert_when: number of events
+ trigger_alert_when_condition: greater than
+ trigger_alert_when_value: 10
+ throttle_window_duration: 5s
+ throttle_fields_to_group_by:
+ - test_field1
+ - test_field2
+ suppress_alerts: True
+ search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6'
+
+ - assert:
+ that:
+ - result.changed == true
+ - replaced['before'] == result['correlation_searches']['before']
+ - replaced['after'] == result['correlation_searches']['after']
+
+ - name: Replaces device configuration of listed correlation searches configuration with
+ provided configuration (IDEMPOTENT)
+ register: result
+ splunk.es.splunk_correlation_searches: *id001
+
+ - name: Assert that task was idempotent
+ assert:
+ that:
+ - result['changed'] == false
+
+ always:
+ - include_tasks: _remove_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml
new file mode 100644
index 000000000..151e7305a
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml
@@ -0,0 +1,118 @@
+---
+- debug:
+ msg: START correlation_searches round trip integration tests on connection={{ ansible_connection }}
+
+- include_tasks: _remove_config.yaml
+- include_tasks: _populate_config.yaml
+
+- block:
+ - name: Apply the provided configuration (base config)
+ register: base_config
+ splunk.es.splunk_correlation_searches: &id001
+ state: merged
+ config:
+ - name: Ansible Test 3
+ disabled: false
+ description: test description
+ app: DA-ESS-EndpointProtection
+ annotations:
+ cis20:
+ - test1
+ mitre_attack:
+ - test2
+ kill_chain_phases:
+ - test3
+ nist:
+ - test4
+ custom:
+ - framework: test_framework
+ custom_annotations:
+ - test5
+ ui_dispatch_context: SplunkEnterpriseSecuritySuite
+ time_earliest: -24h
+ time_latest: now
+ cron_schedule: "*/5 * * * *"
+ scheduling: realtime
+ schedule_window: 0
+ schedule_priority: default
+ trigger_alert: once
+ trigger_alert_when: number of events
+ trigger_alert_when_condition: greater than
+ trigger_alert_when_value: 10
+ throttle_window_duration: 5s
+ throttle_fields_to_group_by:
+ - test_field1
+ suppress_alerts: False
+ search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6'
+
+ - name: Gather correlation searches configuration facts
+ register: gather_result
+ splunk.es.splunk_correlation_searches:
+ config:
+ - name: Ansible Test
+ state: gathered
+
+ - name: Apply the configuration which need to be reverted
+ register: result
+ splunk.es.splunk_correlation_searches:
+ config:
+ - name: Ansible Test
+ disabled: false
+ description: test description
+ app: SplunkEnterpriseSecuritySuite
+ annotations:
+ cis20:
+ - test1
+ - test2
+ mitre_attack:
+ - test3
+ - test4
+ kill_chain_phases:
+ - test5
+ - test6
+ nist:
+ - test7
+ - test8
+ custom:
+ - framework: test_framework2
+ custom_annotations:
+ - test9
+ - test10
+ ui_dispatch_context: SplunkEnterpriseSecuritySuite
+ time_earliest: -24h
+ time_latest: now
+ cron_schedule: "*/5 * * * *"
+ scheduling: continuous
+ schedule_window: auto
+ schedule_priority: default
+ trigger_alert: once
+ trigger_alert_when: number of events
+ trigger_alert_when_condition: greater than
+ trigger_alert_when_value: 10
+ throttle_window_duration: 5s
+ throttle_fields_to_group_by:
+ - test_field1
+ - test_field2
+ suppress_alerts: True
+ search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6'
+ state: replaced
+
+ - assert:
+ that:
+ - result.changed == true
+ - replaced['before'] == result['correlation_searches']['before']
+ - replaced['after'] == result['correlation_searches']['after']
+
+ - name: Revert back to base config using facts round trip
+ register: revert
+ splunk.es.splunk_correlation_searches:
+ config: "{{ gather_result['gathered'] }}"
+ state: replaced
+
+ - assert:
+ that:
+ - revert['changed'] == true
+ - merged['after'] == revert['correlation_searches']['after']
+
+ always:
+ - include_tasks: _remove_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml
new file mode 100644
index 000000000..10c0fabcb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml
@@ -0,0 +1,2 @@
+---
+testcase: '*'
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/meta/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/meta/main.yaml
new file mode 100644
index 000000000..23d65c7ef
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/meta/main.yaml
@@ -0,0 +1,2 @@
+---
+dependencies: []
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml
new file mode 100644
index 000000000..dcc81f25f
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml
@@ -0,0 +1,18 @@
+---
+- name: collect all test cases
+ find:
+ paths: '{{ role_path }}/tests'
+ patterns: '{{ testcase }}.yaml'
+ register: test_cases
+
+- name: set test_items
+ set_fact: test_items="{{ test_cases.files | map(attribute='path') | list }}"
+
+- name: Run test case (connection=ansible.netcommon.httpapi)
+ include: '{{ test_case_to_run }}'
+ vars:
+ ansible_connection: ansible.netcommon.httpapi
+ with_items: '{{ test_items }}'
+ loop_control:
+ loop_var: test_case_to_run
+ tags: connection_httpapi
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml
new file mode 100644
index 000000000..62cc1ae1e
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml
@@ -0,0 +1,7 @@
+---
+- include: cli.yaml
+ tags:
+ - cli
+
+- include: redirection.yaml
+ when: ansible_version.full is version('2.10.0', '>=')
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml
new file mode 100644
index 000000000..bafc23a45
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml
@@ -0,0 +1,6 @@
+---
+- name: collect all test cases
+ find:
+ paths: '{{ role_path }}/tests/redirection'
+ patterns: '{{ testcase }}.yaml'
+ register: test_cases
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml
new file mode 100644
index 000000000..2bb0129a4
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml
@@ -0,0 +1,22 @@
+---
+- name: Populate data inputs config
+ splunk.es.splunk_data_inputs_monitor:
+ config:
+ - name: "/var/log"
+ blacklist: '/\/var\/log\/[a-z]/gm'
+ check_index: True
+ check_path: True
+ crc_salt: <SOURCE>
+ disabled: False
+ follow_tail: False
+ host: "$decideOnStartup"
+ host_regex: "/(test_host)/gm"
+ host_segment: 3
+ ignore_older_than: 5d
+ index: default
+ recursive: True
+ rename_source: test
+ sourcetype: test_source_type
+ time_before_close: 4
+ whitelist: '/\/var\/log\/[a-z]/gm'
+ state: merged \ No newline at end of file
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_remove_dim_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_remove_dim_config.yaml
new file mode 100644
index 000000000..d0fdb2d90
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_remove_dim_config.yaml
@@ -0,0 +1,6 @@
+---
+- name: Delete data inputs config
+ splunk.es.splunk_data_inputs_monitor:
+ config:
+ - name: "/var/log"
+ state: deleted \ No newline at end of file
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml
new file mode 100644
index 000000000..8f19b500f
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml
@@ -0,0 +1,36 @@
+---
+- debug:
+ msg: Start Deleted integration state for data_inputs_monitors ansible_connection={{ ansible_connection
+ }}
+
+- include_tasks: _remove_dim_config.yaml
+
+- include_tasks: _populate_dim_config.yaml
+
+- block:
+ - name: Delete data inputs monitors config
+ splunk.es.splunk_data_inputs_monitor: &id001
+ config:
+ - name: /var/log
+ state: deleted
+ register: result
+
+ - assert:
+ that:
+ - result.changed == true
+ - "{{ merged['after'] | dict2items |\
+ symmetric_difference(result['data_inputs_monitor']['before'][0] |\
+ dict2items)| length==5}}"
+ - merged['before'] == result['data_inputs_monitor']['after']
+
+ - name: Delete attributes of all configured data inputs monitors (IDEMPOTENT)
+ register: result
+ splunk.es.splunk_data_inputs_monitor: *id001
+
+ - name: Assert that the previous delete task was idempotent
+ assert:
+ that:
+ - result.changed == false
+
+ always:
+ - include_tasks: _remove_dim_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/gathered.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/gathered.yaml
new file mode 100644
index 000000000..84aae2076
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/gathered.yaml
@@ -0,0 +1,25 @@
+---
+- debug:
+ msg: START data_inputs_monitors gathered integration tests on connection={{ ansible_connection }}
+
+- include_tasks: _remove_dim_config.yaml
+
+- include_tasks: _populate_dim_config.yaml
+
+- block:
+ - name: Gather data inputs monitors config
+ splunk.es.splunk_data_inputs_monitor:
+ config:
+ - name: "/var/log"
+ state: gathered
+ register: result
+
+ - assert:
+ that:
+ - "{{ merged['after'] | dict2items |\
+ symmetric_difference(result['gathered'][0] |\
+ dict2items)| length==5}}"
+ - result['changed'] == false
+
+ always:
+ - include_tasks: _remove_dim_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml
new file mode 100644
index 000000000..0388c26c1
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml
@@ -0,0 +1,57 @@
+---
+- debug:
+ msg: START Merged data_inputs_monitor state for integration tests on connection={{
+ ansible_connection }}
+
+- include_tasks: _remove_dim_config.yaml
+
+- block:
+ - name: Merge and create new data inputs monitors configuration
+ tags: merged
+ register: result
+ splunk.es.splunk_data_inputs_monitor: &id001
+ state: merged
+ config:
+ - name: "/var/log"
+ blacklist: '/\/var\/log\/[a-z]/gm'
+ check_index: True
+ check_path: True
+ crc_salt: <SOURCE>
+ disabled: False
+ follow_tail: False
+ host: "$decideOnStartup"
+ host_regex: "/(test_host)/gm"
+ host_segment: 3
+ ignore_older_than: 5d
+ index: default
+ recursive: True
+ rename_source: test
+ sourcetype: test_source_type
+ time_before_close: 4
+ whitelist: '/\/var\/log\/[a-z]/gm'
+
+ - name: Assert that task reports change and after dict is correctly generated
+ assert:
+ that:
+ - result['changed'] == true
+ - "{{ merged['after'] | dict2items |\
+ symmetric_difference(result['data_inputs_monitor']['after'][0] |\
+ dict2items)| length==5}}"
+
+ - name: Assert that before dicts are correctly generated
+ assert:
+ that:
+ - merged['before'] == result['data_inputs_monitor']['before']
+
+ - name: Merge provided configuration with device configuration (IDEMPOTENT)
+ register: result
+ splunk.es.splunk_data_inputs_monitor: *id001
+
+ - name: Assert that the previous task was idempotent
+ assert:
+ that:
+ - result['changed'] == false
+
+ always:
+
+ - include_tasks: _remove_dim_config.yaml \ No newline at end of file
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml
new file mode 100644
index 000000000..7a9dd8c46
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml
@@ -0,0 +1,43 @@
+---
+- debug:
+ msg: START Replaced data_inputs_monitor state for integration tests on connection={{ ansible_connection
+ }}
+
+- include_tasks: _remove_dim_config.yaml
+- include_tasks: _populate_dim_config.yaml
+
+- block:
+
+ - name: Replace existing data inputs monitors configuration
+ register: result
+ splunk.es.splunk_data_inputs_monitor: &id001
+ state: replaced
+ config:
+ - name: "/var/log"
+ blacklist: '/\/var\/log\/[a-z0-9]/gm'
+ crc_salt: <SOURCE>
+
+
+ - assert:
+ that:
+ - result.changed == true
+ - "{{ replaced['before'] | dict2items |\
+ symmetric_difference(result['data_inputs_monitor']['before'][0] |\
+ dict2items) | length==5}}"
+ - "{{ replaced['after'] | dict2items |\
+ symmetric_difference(result['data_inputs_monitor']['after'][0] |\
+ dict2items) | length==3}}"
+
+ - name: Replaces device configuration of listed data inputs monitors configuration with
+ provided configuration (IDEMPOTENT)
+ register: result
+ splunk.es.splunk_data_inputs_monitor: *id001
+
+ - name: Assert that task was idempotent
+ assert:
+ that:
+ - result['changed'] == false
+
+ always:
+
+ - include_tasks: _remove_dim_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml
new file mode 100644
index 000000000..4025c446c
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml
@@ -0,0 +1,73 @@
+---
+- debug:
+ msg: START data_inputs_monitor round trip integration tests on connection={{ ansible_connection }}
+
+- include_tasks: _remove_dim_config.yaml
+- include_tasks: _populate_dim_config.yaml
+
+- block:
+
+ - name: Apply the provided configuration (base config)
+ register: base_config
+ splunk.es.splunk_data_inputs_monitor: &id001
+ state: merged
+ config:
+ - name: "/var/log"
+ blacklist: '/\/var\/log\/[a-z]/gm'
+ check_index: True
+ check_path: True
+ crc_salt: <SOURCE>
+ disabled: False
+ follow_tail: False
+ host: "$decideOnStartup"
+ host_regex: "/(test_host)/gm"
+ host_segment: 3
+ ignore_older_than: 5d
+ index: default
+ recursive: True
+ rename_source: test
+ sourcetype: test_source_type
+ time_before_close: 4
+ whitelist: '/\/var\/log\/[a-z]/gm'
+
+ - name: Gather data inputs monitors configuration facts
+ register: gather_result
+ splunk.es.splunk_data_inputs_monitor:
+ config:
+ - name: "/var/log"
+ state: gathered
+
+ - name: Apply the configuration which need to be reverted
+ register: result
+ splunk.es.splunk_data_inputs_monitor:
+ config:
+ - name: "/var/log"
+ blacklist: '/\/var\/log\/[a-z0-9]/gm'
+ crc_salt: <SOURCE>
+ state: replaced
+
+ - assert:
+ that:
+ - result.changed == true
+ - "{{ replaced['before'] | dict2items |\
+ symmetric_difference(result['data_inputs_monitor']['before'][0] |\
+ dict2items) | length==5}}"
+ - "{{ replaced['after'] | dict2items |\
+ symmetric_difference(result['data_inputs_monitor']['after'][0] |\
+ dict2items) | length==3}}"
+
+ - name: Revert back to base config using facts round trip
+ register: revert
+ splunk.es.splunk_data_inputs_monitor:
+ config: "{{ gather_result['gathered'] }}"
+ state: replaced
+
+ - assert:
+ that:
+ - revert['changed'] == true
+ - "{{ merged['after'] | dict2items |\
+ symmetric_difference(revert['data_inputs_monitor']['after'][0] |\
+ dict2items)| length==5}}"
+
+ always:
+ - include_tasks: _remove_dim_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml
new file mode 100644
index 000000000..881a750b4
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml
@@ -0,0 +1,46 @@
+---
+merged:
+ before: []
+
+ after:
+ name: "/var/log"
+ blacklist: '/\/var\/log\/[a-z]/gm'
+ check_index: True
+ check_path: True
+ crc_salt: <SOURCE>
+ disabled: False
+ follow_tail: False
+ host: "$decideOnStartup"
+ host_regex: "/(test_host)/gm"
+ host_segment: 3
+ ignore_older_than: 5d
+ index: default
+ recursive: True
+ rename_source: test
+ sourcetype: test_source_type
+ time_before_close:
+ whitelist: '/\/var\/log\/[a-z]/gm'
+
+replaced:
+ before:
+ name: "/var/log"
+ blacklist: '/\/var\/log\/[a-z]/gm'
+ check_index: True
+ check_path: True
+ crc_salt: <SOURCE>
+ disabled: False
+ follow_tail: False
+ host: "$decideOnStartup"
+ host_regex: "/(test_host)/gm"
+ host_segment: 3
+ ignore_older_than: 5d
+ index: default
+ recursive: True
+ rename_source: test
+ sourcetype: test_source_type
+ time_before_close:
+ whitelist: '/\/var\/log\/[a-z]/gm'
+ after:
+ name: "/var/log"
+ blacklist: '/\/var\/log\/[a-z0-9]/gm'
+ crc_salt: <SOURCE> \ No newline at end of file
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml
new file mode 100644
index 000000000..10c0fabcb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml
@@ -0,0 +1,2 @@
+---
+testcase: '*'
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/meta/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/meta/main.yaml
new file mode 100644
index 000000000..23d65c7ef
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/meta/main.yaml
@@ -0,0 +1,2 @@
+---
+dependencies: []
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml
new file mode 100644
index 000000000..dcc81f25f
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml
@@ -0,0 +1,18 @@
+---
+- name: collect all test cases
+ find:
+ paths: '{{ role_path }}/tests'
+ patterns: '{{ testcase }}.yaml'
+ register: test_cases
+
+- name: set test_items
+ set_fact: test_items="{{ test_cases.files | map(attribute='path') | list }}"
+
+- name: Run test case (connection=ansible.netcommon.httpapi)
+ include: '{{ test_case_to_run }}'
+ vars:
+ ansible_connection: ansible.netcommon.httpapi
+ with_items: '{{ test_items }}'
+ loop_control:
+ loop_var: test_case_to_run
+ tags: connection_httpapi
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml
new file mode 100644
index 000000000..62cc1ae1e
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml
@@ -0,0 +1,7 @@
+---
+- include: cli.yaml
+ tags:
+ - cli
+
+- include: redirection.yaml
+ when: ansible_version.full is version('2.10.0', '>=')
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml
new file mode 100644
index 000000000..bafc23a45
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml
@@ -0,0 +1,6 @@
+---
+- name: collect all test cases
+ find:
+ paths: '{{ role_path }}/tests/redirection'
+ patterns: '{{ testcase }}.yaml'
+ register: test_cases
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml
new file mode 100644
index 000000000..60f87afbf
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml
@@ -0,0 +1,43 @@
+---
+- name: merge data inputs config
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: raw
+ name: 8100
+ connection_host: ip
+ disabled: True
+ host: "$decideOnStartup"
+ index: default
+ queue: parsingQueue
+ raw_tcp_done_timeout: 9
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+ - protocol: tcp
+ datatype: cooked
+ name: 8101
+ connection_host: ip
+ disabled: False
+ host: "$decideOnStartup"
+ restrict_to_host: default
+ - protocol: tcp
+ datatype: splunktcptoken
+ name: test_token
+ token: "01234567-0123-0123-0123-012345678901"
+ - protocol: tcp
+ datatype: ssl
+ name: test_host
+ - protocol: udp
+ name: 7890
+ connection_host: ip
+ disabled: True
+ host: "$decideOnStartup"
+ index: default
+ no_appending_timestamp: True
+ no_priority_stripping: True
+ queue: parsingQueue
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+ state: merged \ No newline at end of file
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml
new file mode 100644
index 000000000..bf904c27d
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml
@@ -0,0 +1,16 @@
+---
+- name: delete data inputs config
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: raw
+ name: default:8100
+ - protocol: tcp
+ datatype: cooked
+ name: default:8101
+ - protocol: tcp
+ datatype: splunktcptoken
+ name: test_token
+ - protocol: udp
+ name: default:7890
+ state: deleted \ No newline at end of file
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/deleted.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/deleted.yaml
new file mode 100644
index 000000000..08974bab5
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/deleted.yaml
@@ -0,0 +1,47 @@
+---
+- debug:
+ msg:
+ Start Deleted integration state for data_inputs_network ansible_connection={{ ansible_connection
+ }}
+
+- include_tasks: _remove_din_config.yaml
+
+- include_tasks: _populate_din_config.yaml
+
+- block:
+ - name: Delete data inputs networks config
+ splunk.es.splunk_data_inputs_network: &id001
+ config:
+ - protocol: tcp
+ datatype: raw
+ name: default:8100
+ - protocol: tcp
+ datatype: cooked
+ name: default:8101
+ - protocol: tcp
+ datatype: splunktcptoken
+ name: test_token
+ token: "01234567-0123-0123-0123-012345678901"
+ - protocol: udp
+ name: default:7890
+ state: deleted
+ register: result
+
+ - assert:
+ that:
+ - result.changed == true
+ - "{{ merged['after'] | symmetric_difference(result['data_inputs_network']['before']) |\
+ \ length == 1 }}"
+ - merged['before'] == result['data_inputs_network']['after']
+
+ - name: Delete attributes of all configured interfaces (IDEMPOTENT)
+ register: result
+ splunk.es.splunk_data_inputs_network: *id001
+
+ - name: Assert that the previous delete task was idempotent
+ assert:
+ that:
+ - result.changed == false
+
+ always:
+ - include_tasks: _remove_din_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/gathered.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/gathered.yaml
new file mode 100644
index 000000000..252ddc7df
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/gathered.yaml
@@ -0,0 +1,38 @@
+---
+- debug:
+ msg:
+ START data_inputs_network gathered integration tests on connection={{ ansible_connection
+ }}
+
+- include_tasks: _remove_din_config.yaml
+
+- include_tasks: _populate_din_config.yaml
+
+- block:
+ - name: Gather data inputs networks config
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: raw
+ name: default:8100
+ - protocol: tcp
+ datatype: cooked
+ name: default:8101
+ - protocol: tcp
+ datatype: splunktcptoken
+ name: test_token
+ - protocol: tcp
+ datatype: ssl
+ name: test_host
+ - protocol: udp
+ name: default:7890
+ state: gathered
+ register: result
+
+ - assert:
+ that:
+ - "{{ merged['after'] | symmetric_difference(result['gathered']) |\
+ \ length == 0 }}"
+ - result['changed'] == false
+ always:
+ - include_tasks: _remove_din_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml
new file mode 100644
index 000000000..842524ec6
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml
@@ -0,0 +1,77 @@
+---
+- debug:
+ msg:
+ START Merged data_inputs_network state for integration tests on connection={{
+ ansible_connection }}
+
+- include_tasks: _remove_din_config.yaml
+
+- block:
+ - name: Merge and create new data inputs networks configuration
+ tags: merged
+ register: result
+ splunk.es.splunk_data_inputs_network: &id001
+ state: merged
+ config:
+ - protocol: tcp
+ datatype: raw
+ name: 8100
+ connection_host: ip
+ disabled: True
+ host: "$decideOnStartup"
+ index: default
+ queue: parsingQueue
+ raw_tcp_done_timeout: 9
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+ - protocol: tcp
+ datatype: cooked
+ name: 8101
+ connection_host: ip
+ disabled: False
+ host: "$decideOnStartup"
+ restrict_to_host: default
+ - protocol: tcp
+ datatype: splunktcptoken
+ name: test_token
+ token: 01234567-0123-0123-0123-012345678901
+ - protocol: tcp
+ datatype: ssl
+ name: test_host
+ - protocol: udp
+ name: 7890
+ connection_host: ip
+ disabled: True
+ host: "$decideOnStartup"
+ index: default
+ no_appending_timestamp: True
+ no_priority_stripping: True
+ queue: parsingQueue
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+
+ - name: Assert that task reports change and after dict is correctly generated
+ assert:
+ that:
+ - result['changed'] == true
+ - "{{ merged['after'] | symmetric_difference(result['data_inputs_network']['after']) |\
+ \ length == 0 }}"
+
+ - name: Assert that before dicts are correctly generated
+ assert:
+ that:
+ - merged['before_merged'] == result['data_inputs_network']['before']
+
+ - name: Merge provided configuration with device configuration (IDEMPOTENT)
+ register: result
+ splunk.es.splunk_data_inputs_network: *id001
+
+ - name: Assert that the previous task was idempotent
+ assert:
+ that:
+ - result['changed'] == false
+
+ always:
+ - include_tasks: _remove_din_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml
new file mode 100644
index 000000000..340df5282
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml
@@ -0,0 +1,72 @@
+---
+- debug:
+ msg:
+ START Replaced data_inputs_network state for integration tests on connection={{ ansible_connection
+ }}
+
+- include_tasks: _remove_din_config.yaml
+- include_tasks: _populate_din_config.yaml
+
+- block:
+ - name: Replace existing data inputs networks configuration
+ register: result
+ splunk.es.splunk_data_inputs_network: &id001
+ state: replaced
+ config:
+ - protocol: tcp
+ datatype: raw
+ name: 8100
+ connection_host: ip
+ disabled: True
+ host: "$decideOnStartup"
+ index: default
+ queue: parsingQueue
+ raw_tcp_done_timeout: 10
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+ - protocol: tcp
+ datatype: cooked
+ name: 8101
+ connection_host: ip
+ disabled: True
+ host: "$decideOnStartup"
+ restrict_to_host: default
+ - protocol: tcp
+ datatype: splunktcptoken
+ name: test_token
+ token: 01234567-0123-0123-0123-012345678900
+ - protocol: udp
+ name: 7890
+ connection_host: ip
+ disabled: True
+ host: "$decideOnStartup"
+ index: default
+ no_appending_timestamp: False
+ no_priority_stripping: False
+ queue: parsingQueue
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+
+ - assert:
+ that:
+ - result.changed == true
+ - "{{ replaced['before'] | symmetric_difference(result['data_inputs_network']['before']) |\
+ \ length == 0 }}"
+ - "{{ replaced['after'] | symmetric_difference(result['data_inputs_network']['after']) |\
+ \ length == 0 }}"
+
+ - name:
+ Replaces device configuration of listed data inputs networks configuration with
+ provided configuration (IDEMPOTENT)
+ register: result
+ splunk.es.splunk_data_inputs_network: *id001
+
+ - name: Assert that task was idempotent
+ assert:
+ that:
+ - result['changed'] == false
+
+ always:
+ - include_tasks: _remove_din_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml
new file mode 100644
index 000000000..1fa3e577c
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml
@@ -0,0 +1,131 @@
+---
+- debug:
+ msg:
+ START data_inputs_network round trip integration tests on connection={{ ansible_connection
+ }}
+
+- include_tasks: _remove_din_config.yaml
+- include_tasks: _populate_din_config.yaml
+
+- block:
+ - name: Apply the provided configuration (base config)
+ register: base_config
+ splunk.es.splunk_data_inputs_network: &id001
+ state: merged
+ config:
+ - protocol: tcp
+ datatype: raw
+ name: 8100
+ connection_host: ip
+ disabled: True
+ host: "$decideOnStartup"
+ index: default
+ queue: parsingQueue
+ raw_tcp_done_timeout: 9
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+ - protocol: tcp
+ datatype: cooked
+ name: 8101
+ connection_host: ip
+ disabled: False
+ host: "$decideOnStartup"
+ restrict_to_host: default
+ - protocol: tcp
+ datatype: splunktcptoken
+ name: test_token
+ token: 01234567-0123-0123-0123-012345678901
+ - protocol: udp
+ name: 7890
+ connection_host: ip
+ disabled: True
+ host: "$decideOnStartup"
+ index: default
+ no_appending_timestamp: True
+ no_priority_stripping: True
+ queue: parsingQueue
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+
+ - name: Gather data inputs networks configuration facts
+ register: gather_result
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: raw
+ name: default:8100
+ - protocol: tcp
+ datatype: cooked
+ name: default:8101
+ - protocol: tcp
+ datatype: splunktcptoken
+ name: test_token
+ - protocol: udp
+ name: default:7890
+ state: gathered
+
+ - name: Apply the configuration which need to be reverted
+ register: result
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: raw
+ name: 8100
+ connection_host: ip
+ disabled: True
+ host: "$decideOnStartup"
+ index: default
+ queue: parsingQueue
+ raw_tcp_done_timeout: 10
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+ - protocol: tcp
+ datatype: cooked
+ name: 8101
+ connection_host: ip
+ disabled: True
+ host: "$decideOnStartup"
+ restrict_to_host: default
+ - protocol: tcp
+ datatype: splunktcptoken
+ name: test_token
+ token: 01234567-0123-0123-0123-012345678900
+ - protocol: udp
+ name: 7890
+ connection_host: ip
+ disabled: True
+ host: "$decideOnStartup"
+ index: default
+ no_appending_timestamp: False
+ no_priority_stripping: False
+ queue: parsingQueue
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+ state: replaced
+
+ - assert:
+ that:
+ - result.changed == true
+ - "{{ replaced['before'] | symmetric_difference(result['data_inputs_network']['before']) |\
+ \ length == 0 }}"
+ - "{{ replaced['after'] | symmetric_difference(result['data_inputs_network']['after']) |\
+ \ length == 0 }}"
+
+ - name: Revert back to base config using facts round trip
+ register: revert
+ splunk.es.splunk_data_inputs_network:
+ config: "{{ gather_result['gathered'] }}"
+ state: replaced
+
+ - assert:
+ that:
+ - revert['changed'] == true
+ - "{{ merged['after'] | symmetric_difference(revert['data_inputs_network']['after']) |\
+ \ length == 1 }}"
+
+ always:
+ - include_tasks: _remove_din_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/vars/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/vars/main.yaml
new file mode 100644
index 000000000..942b75851
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/vars/main.yaml
@@ -0,0 +1,129 @@
+---
+merged:
+ before: []
+ before_merged:
+ - cipher_suite: ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256
+ datatype: ssl
+ disabled: true
+ host: $decideOnStartup
+ index: default
+ name: test_host
+ protocol: tcp
+
+ after:
+ - protocol: tcp
+ datatype: raw
+ name: default:8100
+ connection_host: ip
+ disabled: True
+ host: $decideOnStartup
+ index: default
+ queue: parsingQueue
+ raw_tcp_done_timeout: 9
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+ - protocol: tcp
+ datatype: cooked
+ name: default:8101
+ connection_host: ip
+ disabled: False
+ host: $decideOnStartup
+ restrict_to_host: default
+ - protocol: tcp
+ datatype: splunktcptoken
+ name: splunktcptoken://test_token
+ token: 01234567-0123-0123-0123-012345678901
+ - protocol: tcp
+ datatype: ssl
+ name: test_host
+ cipher_suite: ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256
+ disabled: true
+ host: $decideOnStartup
+ index: default
+ - protocol: udp
+ name: default:7890
+ connection_host: ip
+ disabled: True
+ host: $decideOnStartup
+ index: default
+ no_appending_timestamp: True
+ no_priority_stripping: True
+ queue: parsingQueue
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+
+replaced:
+ before:
+ - protocol: tcp
+ datatype: raw
+ name: default:8100
+ connection_host: ip
+ disabled: True
+ host: $decideOnStartup
+ index: default
+ queue: parsingQueue
+ raw_tcp_done_timeout: 9
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+ - protocol: tcp
+ datatype: cooked
+ name: default:8101
+ connection_host: ip
+ disabled: False
+ host: $decideOnStartup
+ restrict_to_host: default
+ - protocol: tcp
+ datatype: splunktcptoken
+ name: splunktcptoken://test_token
+ token: 01234567-0123-0123-0123-012345678901
+ - protocol: udp
+ name: default:7890
+ connection_host: ip
+ disabled: True
+ host: $decideOnStartup
+ index: default
+ no_appending_timestamp: True
+ no_priority_stripping: True
+ queue: parsingQueue
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+ after:
+ - protocol: tcp
+ datatype: raw
+ name: default:8100
+ connection_host: ip
+ disabled: True
+ host: $decideOnStartup
+ index: default
+ queue: parsingQueue
+ raw_tcp_done_timeout: 10
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+ - protocol: tcp
+ datatype: cooked
+ name: default:8101
+ connection_host: ip
+ disabled: True
+ host: $decideOnStartup
+ restrict_to_host: default
+ - protocol: tcp
+ datatype: splunktcptoken
+ name: splunktcptoken://test_token
+ token: 01234567-0123-0123-0123-012345678900
+ - protocol: udp
+ name: default:7890
+ connection_host: ip
+ disabled: True
+ host: $decideOnStartup
+ index: default
+ no_appending_timestamp: False
+ no_priority_stripping: False
+ queue: parsingQueue
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
diff --git a/ansible_collections/splunk/es/tests/sanity/ignore-2.10.txt b/ansible_collections/splunk/es/tests/sanity/ignore-2.10.txt
new file mode 100644
index 000000000..16b4372f7
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/sanity/ignore-2.10.txt
@@ -0,0 +1 @@
+plugins/action/splunk_correlation_searches.py compile-2.6!skip
diff --git a/ansible_collections/splunk/es/tests/sanity/ignore-2.11.txt b/ansible_collections/splunk/es/tests/sanity/ignore-2.11.txt
new file mode 100644
index 000000000..16b4372f7
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/sanity/ignore-2.11.txt
@@ -0,0 +1 @@
+plugins/action/splunk_correlation_searches.py compile-2.6!skip
diff --git a/ansible_collections/splunk/es/tests/sanity/ignore-2.9.txt b/ansible_collections/splunk/es/tests/sanity/ignore-2.9.txt
new file mode 100644
index 000000000..ed0da94eb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/sanity/ignore-2.9.txt
@@ -0,0 +1,9 @@
+plugins/action/splunk_correlation_searches.py compile-2.6!skip
+plugins/modules/correlation_search.py validate-modules:deprecation-mismatch
+plugins/modules/correlation_search.py validate-modules:invalid-documentation
+plugins/modules/data_input_monitor.py validate-modules:deprecation-mismatch
+plugins/modules/data_input_monitor.py validate-modules:invalid-documentation
+plugins/modules/data_input_network.py validate-modules:deprecation-mismatch
+plugins/modules/data_input_network.py validate-modules:invalid-documentation
+plugins/modules/adaptive_response_notable_event.py validate-modules:deprecation-mismatch
+plugins/modules/adaptive_response_notable_event.py validate-modules:invalid-documentation \ No newline at end of file
diff --git a/ansible_collections/splunk/es/tests/unit/__init__.py b/ansible_collections/splunk/es/tests/unit/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/__init__.py
diff --git a/ansible_collections/splunk/es/tests/unit/compat/__init__.py b/ansible_collections/splunk/es/tests/unit/compat/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/compat/__init__.py
diff --git a/ansible_collections/splunk/es/tests/unit/compat/builtins.py b/ansible_collections/splunk/es/tests/unit/compat/builtins.py
new file mode 100644
index 000000000..bfc8adfbe
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/compat/builtins.py
@@ -0,0 +1,34 @@
+# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+#
+# Compat for python2.7
+#
+
+# One unittest needs to import builtins via __import__() so we need to have
+# the string that represents it
+try:
+ import __builtin__
+except ImportError:
+ BUILTINS = "builtins"
+else:
+ BUILTINS = "__builtin__"
diff --git a/ansible_collections/splunk/es/tests/unit/compat/mock.py b/ansible_collections/splunk/es/tests/unit/compat/mock.py
new file mode 100644
index 000000000..2ea98a17f
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/compat/mock.py
@@ -0,0 +1,128 @@
+# pylint: skip-file
+# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+"""
+Compat module for Python3.x's unittest.mock module
+"""
+import sys
+
+# Python 2.7
+
+# Note: Could use the pypi mock library on python3.x as well as python2.x. It
+# is the same as the python3 stdlib mock library
+
+try:
+ # Allow wildcard import because we really do want to import all of mock's
+ # symbols into this compat shim
+ # pylint: disable=wildcard-import,unused-wildcard-import
+ from unittest.mock import *
+except ImportError:
+ # Python 2
+ # pylint: disable=wildcard-import,unused-wildcard-import
+ try:
+ from mock import *
+ except ImportError:
+ print("You need the mock library installed on python2.x to run tests")
+
+
+# Prior to 3.4.4, mock_open cannot handle binary read_data
+if sys.version_info >= (3,) and sys.version_info < (3, 4, 4):
+ file_spec = None
+
+ def _iterate_read_data(read_data):
+ # Helper for mock_open:
+ # Retrieve lines from read_data via a generator so that separate calls to
+ # readline, read, and readlines are properly interleaved
+ sep = b"\n" if isinstance(read_data, bytes) else "\n"
+ data_as_list = [l + sep for l in read_data.split(sep)]
+
+ if data_as_list[-1] == sep:
+ # If the last line ended in a newline, the list comprehension will have an
+ # extra entry that's just a newline. Remove this.
+ data_as_list = data_as_list[:-1]
+ else:
+ # If there wasn't an extra newline by itself, then the file being
+ # emulated doesn't have a newline to end the last line remove the
+ # newline that our naive format() added
+ data_as_list[-1] = data_as_list[-1][:-1]
+
+ for line in data_as_list:
+ yield line
+
+ def mock_open(mock=None, read_data=""):
+ """
+ A helper function to create a mock to replace the use of `open`. It works
+ for `open` called directly or used as a context manager.
+
+ The `mock` argument is the mock object to configure. If `None` (the
+ default) then a `MagicMock` will be created for you, with the API limited
+ to methods or attributes available on standard file handles.
+
+    `read_data` is a string for the `read`, `readline`, and `readlines` methods of the
+ file handle to return. This is an empty string by default.
+ """
+
+ def _readlines_side_effect(*args, **kwargs):
+ if handle.readlines.return_value is not None:
+ return handle.readlines.return_value
+ return list(_data)
+
+ def _read_side_effect(*args, **kwargs):
+ if handle.read.return_value is not None:
+ return handle.read.return_value
+ return type(read_data)().join(_data)
+
+ def _readline_side_effect():
+ if handle.readline.return_value is not None:
+ while True:
+ yield handle.readline.return_value
+ for line in _data:
+ yield line
+
+ global file_spec
+ if file_spec is None:
+ import _io
+
+ file_spec = list(
+ set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO)))
+ )
+
+ if mock is None:
+ mock = MagicMock(name="open", spec=open)
+
+ handle = MagicMock(spec=file_spec)
+ handle.__enter__.return_value = handle
+
+ _data = _iterate_read_data(read_data)
+
+ handle.write.return_value = None
+ handle.read.return_value = None
+ handle.readline.return_value = None
+ handle.readlines.return_value = None
+
+ handle.read.side_effect = _read_side_effect
+ handle.readline.side_effect = _readline_side_effect()
+ handle.readlines.side_effect = _readlines_side_effect
+
+ mock.return_value = handle
+ return mock
diff --git a/ansible_collections/splunk/es/tests/unit/compat/unittest.py b/ansible_collections/splunk/es/tests/unit/compat/unittest.py
new file mode 100644
index 000000000..df3379b82
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/compat/unittest.py
@@ -0,0 +1,39 @@
+# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+"""
+Compat module for Python2.7's unittest module
+"""
+
+import sys
+
+# Allow wildcard import because we really do want to import all of
+# unittests's symbols into this compat shim
+# pylint: disable=wildcard-import,unused-wildcard-import
+if sys.version_info < (2, 7):
+ try:
+ # Need unittest2 on python2.6
+ from unittest2 import *
+ except ImportError:
+ print("You need unittest2 installed on python2.6.x to run tests")
+else:
+ from unittest import *
diff --git a/ansible_collections/splunk/es/tests/unit/mock/__init__.py b/ansible_collections/splunk/es/tests/unit/mock/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/mock/__init__.py
diff --git a/ansible_collections/splunk/es/tests/unit/mock/loader.py b/ansible_collections/splunk/es/tests/unit/mock/loader.py
new file mode 100644
index 000000000..19c44a7e8
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/mock/loader.py
@@ -0,0 +1,116 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import os
+
+from ansible.errors import AnsibleParserError
+from ansible.parsing.dataloader import DataLoader
+from ansible.module_utils._text import to_bytes, to_text
+
+
+class DictDataLoader(DataLoader):
+ def __init__(self, file_mapping=None):
+ file_mapping = {} if file_mapping is None else file_mapping
+ assert type(file_mapping) == dict
+
+ super(DictDataLoader, self).__init__()
+
+ self._file_mapping = file_mapping
+ self._build_known_directories()
+ self._vault_secrets = None
+
+ def load_from_file(self, path, cache=True, unsafe=False):
+ path = to_text(path)
+ if path in self._file_mapping:
+ return self.load(self._file_mapping[path], path)
+ return None
+
+ # TODO: the real _get_file_contents returns a bytestring, so we actually convert the
+ # unicode/text it's created with to utf-8
+ def _get_file_contents(self, file_name):
+ path = to_text(file_name)
+ if path in self._file_mapping:
+ return (to_bytes(self._file_mapping[path]), False)
+ else:
+ raise AnsibleParserError("file not found: %s" % path)
+
+ def path_exists(self, path):
+ path = to_text(path)
+ return path in self._file_mapping or path in self._known_directories
+
+ def is_file(self, path):
+ path = to_text(path)
+ return path in self._file_mapping
+
+ def is_directory(self, path):
+ path = to_text(path)
+ return path in self._known_directories
+
+ def list_directory(self, path):
+ ret = []
+ path = to_text(path)
+ for x in list(self._file_mapping.keys()) + self._known_directories:
+ if x.startswith(path):
+ if os.path.dirname(x) == path:
+ ret.append(os.path.basename(x))
+ return ret
+
+ def is_executable(self, path):
+ # FIXME: figure out a way to make paths return true for this
+ return False
+
+ def _add_known_directory(self, directory):
+ if directory not in self._known_directories:
+ self._known_directories.append(directory)
+
+ def _build_known_directories(self):
+ self._known_directories = []
+ for path in self._file_mapping:
+ dirname = os.path.dirname(path)
+ while dirname not in ("/", ""):
+ self._add_known_directory(dirname)
+ dirname = os.path.dirname(dirname)
+
+ def push(self, path, content):
+ rebuild_dirs = False
+ if path not in self._file_mapping:
+ rebuild_dirs = True
+
+ self._file_mapping[path] = content
+
+ if rebuild_dirs:
+ self._build_known_directories()
+
+ def pop(self, path):
+ if path in self._file_mapping:
+ del self._file_mapping[path]
+ self._build_known_directories()
+
+ def clear(self):
+ self._file_mapping = dict()
+ self._known_directories = []
+
+ def get_basedir(self):
+ return os.getcwd()
+
+ def set_vault_secrets(self, vault_secrets):
+ self._vault_secrets = vault_secrets
diff --git a/ansible_collections/splunk/es/tests/unit/mock/path.py b/ansible_collections/splunk/es/tests/unit/mock/path.py
new file mode 100644
index 000000000..1e5902864
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/mock/path.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+from ansible_collections.splunk.es.tests.unit.compat.mock import (
+    MagicMock,
+)
+from ansible.utils.path import unfrackpath
+
+
+mock_unfrackpath_noop = MagicMock(
+ spec_set=unfrackpath, side_effect=lambda x, *args, **kwargs: x
+)
diff --git a/ansible_collections/splunk/es/tests/unit/mock/procenv.py b/ansible_collections/splunk/es/tests/unit/mock/procenv.py
new file mode 100644
index 000000000..f7ab5fe91
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/mock/procenv.py
@@ -0,0 +1,94 @@
+# (c) 2016, Matt Davis <mdavis@ansible.com>
+# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import sys
+import json
+
+from contextlib import contextmanager
+from io import BytesIO, StringIO
+from ansible_collections.splunk.es.tests.unit.compat import unittest
+from ansible.module_utils.six import PY3
+from ansible.module_utils._text import to_bytes
+
+
+@contextmanager
+def swap_stdin_and_argv(stdin_data="", argv_data=tuple()):
+ """
+ context manager that temporarily masks the test runner's values for stdin and argv
+ """
+ real_stdin = sys.stdin
+ real_argv = sys.argv
+
+ if PY3:
+ fake_stream = StringIO(stdin_data)
+ fake_stream.buffer = BytesIO(to_bytes(stdin_data))
+ else:
+ fake_stream = BytesIO(to_bytes(stdin_data))
+
+ try:
+ sys.stdin = fake_stream
+ sys.argv = argv_data
+
+ yield
+ finally:
+ sys.stdin = real_stdin
+ sys.argv = real_argv
+
+
+@contextmanager
+def swap_stdout():
+ """
+ context manager that temporarily replaces stdout for tests that need to verify output
+ """
+ old_stdout = sys.stdout
+
+ if PY3:
+ fake_stream = StringIO()
+ else:
+ fake_stream = BytesIO()
+
+ try:
+ sys.stdout = fake_stream
+
+ yield fake_stream
+ finally:
+ sys.stdout = old_stdout
+
+
+class ModuleTestCase(unittest.TestCase):
+ def setUp(self, module_args=None):
+ if module_args is None:
+ module_args = {
+ "_ansible_remote_tmp": "/tmp",
+ "_ansible_keep_remote_files": False,
+ }
+
+ args = json.dumps(dict(ANSIBLE_MODULE_ARGS=module_args))
+
+ # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually
+ self.stdin_swap = swap_stdin_and_argv(stdin_data=args)
+ self.stdin_swap.__enter__()
+
+ def tearDown(self):
+ # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually
+ self.stdin_swap.__exit__(None, None, None)
diff --git a/ansible_collections/splunk/es/tests/unit/mock/vault_helper.py b/ansible_collections/splunk/es/tests/unit/mock/vault_helper.py
new file mode 100644
index 000000000..b34ae1340
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/mock/vault_helper.py
@@ -0,0 +1,42 @@
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+from ansible.module_utils._text import to_bytes
+
+from ansible.parsing.vault import VaultSecret
+
+
+class TextVaultSecret(VaultSecret):
+ """A secret piece of text. ie, a password. Tracks text encoding.
+
+ The text encoding of the text may not be the default text encoding so
+ we keep track of the encoding so we encode it to the same bytes."""
+
+ def __init__(self, text, encoding=None, errors=None, _bytes=None):
+ super(TextVaultSecret, self).__init__()
+ self.text = text
+ self.encoding = encoding or "utf-8"
+ self._bytes = _bytes
+ self.errors = errors or "strict"
+
+ @property
+ def bytes(self):
+ """The text encoded with encoding, unless we specifically set _bytes."""
+ return self._bytes or to_bytes(
+ self.text, encoding=self.encoding, errors=self.errors
+ )
diff --git a/ansible_collections/splunk/es/tests/unit/mock/yaml_helper.py b/ansible_collections/splunk/es/tests/unit/mock/yaml_helper.py
new file mode 100644
index 000000000..5df30aaed
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/mock/yaml_helper.py
@@ -0,0 +1,167 @@
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+import io
+import yaml
+
+from ansible.module_utils.six import PY3
+from ansible.parsing.yaml.loader import AnsibleLoader
+from ansible.parsing.yaml.dumper import AnsibleDumper
+
+
+class YamlTestUtils(object):
+ """Mixin class to combine with a unittest.TestCase subclass."""
+
+ def _loader(self, stream):
+ """Vault related tests will want to override this.
+
+ Vault cases should setup a AnsibleLoader that has the vault password."""
+ return AnsibleLoader(stream)
+
+ def _dump_stream(self, obj, stream, dumper=None):
+ """Dump to a py2-unicode or py3-string stream."""
+ if PY3:
+ return yaml.dump(obj, stream, Dumper=dumper)
+ else:
+ return yaml.dump(obj, stream, Dumper=dumper, encoding=None)
+
+ def _dump_string(self, obj, dumper=None):
+ """Dump to a py2-unicode or py3-string"""
+ if PY3:
+ return yaml.dump(obj, Dumper=dumper)
+ else:
+ return yaml.dump(obj, Dumper=dumper, encoding=None)
+
+ def _dump_load_cycle(self, obj):
+ # Each pass though a dump or load revs the 'generation'
+ # obj to yaml string
+ string_from_object_dump = self._dump_string(obj, dumper=AnsibleDumper)
+
+ # wrap a stream/file like StringIO around that yaml
+ stream_from_object_dump = io.StringIO(string_from_object_dump)
+ loader = self._loader(stream_from_object_dump)
+ # load the yaml stream to create a new instance of the object (gen 2)
+ obj_2 = loader.get_data()
+
+ # dump the gen 2 objects directory to strings
+ string_from_object_dump_2 = self._dump_string(
+ obj_2, dumper=AnsibleDumper
+ )
+
+ # The gen 1 and gen 2 yaml strings
+ self.assertEqual(string_from_object_dump, string_from_object_dump_2)
+ # the gen 1 (orig) and gen 2 py object
+ self.assertEqual(obj, obj_2)
+
+ # again! gen 3... load strings into py objects
+ stream_3 = io.StringIO(string_from_object_dump_2)
+ loader_3 = self._loader(stream_3)
+ obj_3 = loader_3.get_data()
+
+ string_from_object_dump_3 = self._dump_string(
+ obj_3, dumper=AnsibleDumper
+ )
+
+ self.assertEqual(obj, obj_3)
+ # should be transitive, but...
+ self.assertEqual(obj_2, obj_3)
+ self.assertEqual(string_from_object_dump, string_from_object_dump_3)
+
+ def _old_dump_load_cycle(self, obj):
+ """Dump the passed in object to yaml, load it back up, dump again, compare."""
+ stream = io.StringIO()
+
+ yaml_string = self._dump_string(obj, dumper=AnsibleDumper)
+ self._dump_stream(obj, stream, dumper=AnsibleDumper)
+
+ yaml_string_from_stream = stream.getvalue()
+
+ # reset stream
+ stream.seek(0)
+
+ loader = self._loader(stream)
+ # loader = AnsibleLoader(stream, vault_password=self.vault_password)
+ obj_from_stream = loader.get_data()
+
+ stream_from_string = io.StringIO(yaml_string)
+ loader2 = self._loader(stream_from_string)
+ # loader2 = AnsibleLoader(stream_from_string, vault_password=self.vault_password)
+ obj_from_string = loader2.get_data()
+
+ stream_obj_from_stream = io.StringIO()
+ stream_obj_from_string = io.StringIO()
+
+ if PY3:
+ yaml.dump(
+ obj_from_stream, stream_obj_from_stream, Dumper=AnsibleDumper
+ )
+ yaml.dump(
+ obj_from_stream, stream_obj_from_string, Dumper=AnsibleDumper
+ )
+ else:
+ yaml.dump(
+ obj_from_stream,
+ stream_obj_from_stream,
+ Dumper=AnsibleDumper,
+ encoding=None,
+ )
+ yaml.dump(
+ obj_from_stream,
+ stream_obj_from_string,
+ Dumper=AnsibleDumper,
+ encoding=None,
+ )
+
+ yaml_string_stream_obj_from_stream = stream_obj_from_stream.getvalue()
+ yaml_string_stream_obj_from_string = stream_obj_from_string.getvalue()
+
+ stream_obj_from_stream.seek(0)
+ stream_obj_from_string.seek(0)
+
+ if PY3:
+ yaml_string_obj_from_stream = yaml.dump(
+ obj_from_stream, Dumper=AnsibleDumper
+ )
+ yaml_string_obj_from_string = yaml.dump(
+ obj_from_string, Dumper=AnsibleDumper
+ )
+ else:
+ yaml_string_obj_from_stream = yaml.dump(
+ obj_from_stream, Dumper=AnsibleDumper, encoding=None
+ )
+ yaml_string_obj_from_string = yaml.dump(
+ obj_from_string, Dumper=AnsibleDumper, encoding=None
+ )
+
+ assert yaml_string == yaml_string_obj_from_stream
+ assert (
+ yaml_string
+ == yaml_string_obj_from_stream
+ == yaml_string_obj_from_string
+ )
+ assert (
+ yaml_string
+ == yaml_string_obj_from_stream
+ == yaml_string_obj_from_string
+ == yaml_string_stream_obj_from_stream
+ == yaml_string_stream_obj_from_string
+ )
+ assert obj == obj_from_stream
+ assert obj == obj_from_string
+ assert obj == yaml_string_obj_from_stream
+ assert obj == yaml_string_obj_from_string
+ assert (
+ obj
+ == obj_from_stream
+ == obj_from_string
+ == yaml_string_obj_from_stream
+ == yaml_string_obj_from_string
+ )
+ return {
+ "obj": obj,
+ "yaml_string": yaml_string,
+ "yaml_string_from_stream": yaml_string_from_stream,
+ "obj_from_stream": obj_from_stream,
+ "obj_from_string": obj_from_string,
+ "yaml_string_obj_from_string": yaml_string_obj_from_string,
+ }
diff --git a/ansible_collections/splunk/es/tests/unit/modules/__init__.py b/ansible_collections/splunk/es/tests/unit/modules/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/modules/__init__.py
diff --git a/ansible_collections/splunk/es/tests/unit/modules/conftest.py b/ansible_collections/splunk/es/tests/unit/modules/conftest.py
new file mode 100644
index 000000000..e19a1e04c
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/modules/conftest.py
@@ -0,0 +1,40 @@
+# Copyright (c) 2017 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import json
+
+import pytest
+
+from ansible.module_utils.six import string_types
+from ansible.module_utils._text import to_bytes
+from ansible.module_utils.common._collections_compat import MutableMapping
+
+
+@pytest.fixture
+def patch_ansible_module(request, mocker):
+ if isinstance(request.param, string_types):
+ args = request.param
+ elif isinstance(request.param, MutableMapping):
+ if "ANSIBLE_MODULE_ARGS" not in request.param:
+ request.param = {"ANSIBLE_MODULE_ARGS": request.param}
+ if "_ansible_remote_tmp" not in request.param["ANSIBLE_MODULE_ARGS"]:
+ request.param["ANSIBLE_MODULE_ARGS"][
+ "_ansible_remote_tmp"
+ ] = "/tmp"
+ if (
+ "_ansible_keep_remote_files"
+ not in request.param["ANSIBLE_MODULE_ARGS"]
+ ):
+ request.param["ANSIBLE_MODULE_ARGS"][
+ "_ansible_keep_remote_files"
+ ] = False
+ args = json.dumps(request.param)
+ else:
+ raise Exception(
+ "Malformed data to the patch_ansible_module pytest fixture"
+ )
+
+ mocker.patch("ansible.module_utils.basic._ANSIBLE_ARGS", to_bytes(args))
diff --git a/ansible_collections/splunk/es/tests/unit/modules/utils.py b/ansible_collections/splunk/es/tests/unit/modules/utils.py
new file mode 100644
index 000000000..d55afc0b3
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/modules/utils.py
@@ -0,0 +1,51 @@
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+import json
+
+from ansible_collections.splunk.es.tests.unit.compat import unittest
+from ansible_collections.splunk.es.tests.unit.compat.mock import patch
+from ansible.module_utils import basic
+from ansible.module_utils._text import to_bytes
+
+
+def set_module_args(args):
+ if "_ansible_remote_tmp" not in args:
+ args["_ansible_remote_tmp"] = "/tmp"
+ if "_ansible_keep_remote_files" not in args:
+ args["_ansible_keep_remote_files"] = False
+
+ args = json.dumps({"ANSIBLE_MODULE_ARGS": args})
+ basic._ANSIBLE_ARGS = to_bytes(args)
+
+
+class AnsibleExitJson(Exception):
+ pass
+
+
+class AnsibleFailJson(Exception):
+ pass
+
+
+def exit_json(*args, **kwargs):
+ if "changed" not in kwargs:
+ kwargs["changed"] = False
+ raise AnsibleExitJson(kwargs)
+
+
+def fail_json(*args, **kwargs):
+ kwargs["failed"] = True
+ raise AnsibleFailJson(kwargs)
+
+
+class ModuleTestCase(unittest.TestCase):
+ def setUp(self):
+ self.mock_module = patch.multiple(
+ basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json
+ )
+ self.mock_module.start()
+ self.mock_sleep = patch("time.sleep")
+ self.mock_sleep.start()
+ set_module_args({})
+ self.addCleanup(self.mock_module.stop)
+ self.addCleanup(self.mock_sleep.stop)
diff --git a/ansible_collections/splunk/es/tests/unit/plugins/action/__init__.py b/ansible_collections/splunk/es/tests/unit/plugins/action/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/plugins/action/__init__.py
diff --git a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_adaptive_response_notable_events.py b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_adaptive_response_notable_events.py
new file mode 100644
index 000000000..b6a84fc78
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_adaptive_response_notable_events.py
@@ -0,0 +1,443 @@
+# Copyright (c) 2022 Red Hat
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+from ansible.module_utils.six import PY2
+
+builtin_import = "builtins.__import__"
+if PY2:
+ builtin_import = "__builtin__.__import__"
+
+import tempfile
+from ansible.playbook.task import Task
+from ansible.template import Templar
+from ansible_collections.splunk.es.plugins.action.splunk_adaptive_response_notable_events import (
+ ActionModule,
+)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+)
+from ansible_collections.ansible.utils.tests.unit.compat.mock import (
+ MagicMock,
+ patch,
+)
+
+RESPONSE_PAYLOAD = [
+ {
+ "entry": [
+ {
+ "content": {
+ "action.notable.param.default_owner": "",
+ "action.notable.param.default_status": "0",
+ "action.notable.param.drilldown_name": "test_drill_name",
+ "action.notable.param.drilldown_search": "test_drill",
+ "action.notable.param.drilldown_earliest_offset": "$info_min_time$",
+ "action.notable.param.drilldown_latest_offset": "$info_max_time$",
+ "action.notable.param.extract_artifacts": '{"asset": ["src", "dest", "dvc", "orig_host"],"identity": '
+ '["src_user", "user", "src_user_id", "src_user_role", "user_id", "user_role", "vendor_account"]}',
+ "action.notable.param.investigation_profiles": '{"profile://test profile 1":{}, "profile://test profile 2":{}, '
+ '"profile://test profile 3":{}}',
+ "action.notable.param.next_steps": '{"version": 1, "data": "[[action|makestreams]][[action|nbtstat]][[action|nslookup]]"}',
+ "action.notable.param.recommended_actions": "email,logevent,makestreams,nbtstat",
+ "action.notable.param.rule_description": "test notable event",
+ "action.notable.param.rule_title": "ansible_test_notable",
+ "action.notable.param.security_domain": "threat",
+ "action.notable.param.severity": "high",
+ "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent'
+ 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai'
+ 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio'
+ 'n.src" as "src" | where "count">=6',
+ "actions": "notable",
+ },
+ "name": "Ansible Test",
+ }
+ ]
+ },
+ {
+ "entry": [
+ {
+ "content": {
+ "action.notable.param.default_owner": "",
+ "action.notable.param.default_status": "",
+ "action.notable.param.drilldown_name": "test_drill_name",
+ "action.notable.param.drilldown_search": "test_drill",
+ "action.notable.param.drilldown_earliest_offset": "$info_min_time$",
+ "action.notable.param.drilldown_latest_offset": "$info_max_time$",
+ "action.notable.param.extract_artifacts": '{"asset": ["src", "dest"],"identity": ["src_user", "user", "src_user_id"]}',
+ "action.notable.param.investigation_profiles": '{"profile://test profile 1":{}, "profile://test profile 2":{}, '
+ '"profile://test profile 3":{}}',
+ "action.notable.param.next_steps": '{"version": 1, "data": "[[action|makestreams]]"}',
+ "action.notable.param.recommended_actions": "email,logevent",
+ "action.notable.param.rule_description": "test notable event",
+ "action.notable.param.rule_title": "ansible_test_notable",
+ "action.notable.param.security_domain": "threat",
+ "action.notable.param.severity": "high",
+ "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent'
+ 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai'
+ 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio'
+ 'n.src" as "src" | where "count">=6',
+ "actions": "notable",
+ },
+ "name": "Ansible Test",
+ }
+ ]
+ },
+]
+
+REQUEST_PAYLOAD = [
+ {
+ "correlation_search_name": "Ansible Test",
+ "default_status": "unassigned",
+ "description": "test notable event",
+ "drilldown_earliest_offset": "$info_min_time$",
+ "drilldown_latest_offset": "$info_max_time$",
+ "drilldown_name": "test_drill_name",
+ "drilldown_search": "test_drill",
+ "extract_artifacts": {
+ "asset": ["src", "dest", "dvc", "orig_host"],
+ "identity": [
+ "src_user",
+ "user",
+ "src_user_id",
+ "src_user_role",
+ "user_id",
+ "user_role",
+ "vendor_account",
+ ],
+ },
+ "investigation_profiles": [
+ "test profile 1",
+ "test profile 2",
+ "test profile 3",
+ ],
+ "next_steps": ["makestreams", "nbtstat", "nslookup"],
+ "name": "ansible_test_notable",
+ "recommended_actions": ["email", "logevent", "makestreams", "nbtstat"],
+ "security_domain": "threat",
+ "severity": "high",
+ },
+ {
+ "correlation_search_name": "Ansible Test",
+ "description": "test notable event",
+ "drilldown_earliest_offset": "$info_min_time$",
+ "drilldown_latest_offset": "$info_max_time$",
+ "extract_artifacts": {
+ "asset": ["src", "dest"],
+ "identity": ["src_user", "user", "src_user_id"],
+ },
+ "next_steps": ["makestreams"],
+ "name": "ansible_test_notable",
+ "recommended_actions": ["email", "logevent"],
+ "security_domain": "threat",
+ "severity": "high",
+ },
+]
+
+
+class TestSplunkEsAdaptiveResponseNotableEvents:
+ def setup(self):
+ task = MagicMock(Task)
+ # Ansible > 2.13 looks for check_mode in task
+ task.check_mode = False
+ play_context = MagicMock()
+ # Ansible <= 2.13 looks for check_mode in play_context
+ play_context.check_mode = False
+ connection = patch(
+ "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection"
+ )
+ connection._socket_path = tempfile.NamedTemporaryFile().name
+ fake_loader = {}
+ templar = Templar(loader=fake_loader)
+ self._plugin = ActionModule(
+ task=task,
+ connection=connection,
+ play_context=play_context,
+ loader=fake_loader,
+ templar=templar,
+ shared_loader_obj=None,
+ )
+ self._plugin._task.action = "adaptive_response_notable_events"
+ self._plugin._task.async_val = False
+ self._task_vars = {}
+ self.metadata = {
+ "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent'
+ 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai'
+ 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio'
+ 'n.src" as "src" | where "count">=6',
+ "actions": "notable",
+ }
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_adaptive_response_notable_events_merged_01(
+ self, connection, monkeypatch
+ ):
+ metadata = {
+ "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent'
+ 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai'
+ 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio'
+ 'n.src" as "src" | where "count">=6',
+ "actions": "",
+ }
+ self._plugin.api_response = RESPONSE_PAYLOAD[0]
+ self._plugin.search_for_resource_name = MagicMock()
+ self._plugin.search_for_resource_name.return_value = {}, metadata
+
+ def create_update(self, rest_path, data=None):
+ return RESPONSE_PAYLOAD[0]
+
+ monkeypatch.setattr(SplunkRequest, "create_update", create_update)
+
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+ self._plugin._task.args = {
+ "state": "merged",
+ "config": [REQUEST_PAYLOAD[0]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_adaptive_response_notable_events_merged_02(
+ self, connection, monkeypatch
+ ):
+ self._plugin.api_response = RESPONSE_PAYLOAD[0]
+ self._plugin.search_for_resource_name = MagicMock()
+ self._plugin.search_for_resource_name.return_value = (
+ RESPONSE_PAYLOAD[0],
+ self.metadata,
+ )
+
+ def create_update(self, rest_path, data=None):
+ return RESPONSE_PAYLOAD[1]
+
+ monkeypatch.setattr(SplunkRequest, "create_update", create_update)
+
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+ self._plugin._task.args = {
+ "state": "merged",
+ "config": [REQUEST_PAYLOAD[1]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+
+ assert result["changed"] is True
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_adaptive_response_notable_events_merged_idempotent(
+ self, conn, monkeypatch
+ ):
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+
+ def create_update(self, rest_path, data=None):
+ return RESPONSE_PAYLOAD[0]
+
+ def get_by_path(self, path):
+ return RESPONSE_PAYLOAD[0]
+
+ monkeypatch.setattr(SplunkRequest, "create_update", create_update)
+ monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
+
+ self._plugin._task.args = {
+ "state": "merged",
+ "config": [REQUEST_PAYLOAD[0]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_adaptive_response_notable_events_replaced_01(
+ self, conn, monkeypatch
+ ):
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+ self._plugin.search_for_resource_name = MagicMock()
+ self._plugin.search_for_resource_name.return_value = (
+ RESPONSE_PAYLOAD[0],
+ self.metadata,
+ )
+
+ def create_update(self, rest_path, data=None):
+ return RESPONSE_PAYLOAD[0]
+
+ def get_by_path(self, path):
+ return RESPONSE_PAYLOAD[0]
+
+ def delete_by_path(self, path):
+ return {}
+
+ monkeypatch.setattr(SplunkRequest, "create_update", create_update)
+ monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
+ monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path)
+
+ self._plugin._task.args = {
+ "state": "replaced",
+ "config": [REQUEST_PAYLOAD[1]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_adaptive_response_notable_events_replaced_02(
+ self, conn, monkeypatch
+ ):
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+ self._plugin.search_for_resource_name = MagicMock()
+ self._plugin.search_for_resource_name.return_value = (
+ RESPONSE_PAYLOAD[0],
+ self.metadata,
+ )
+
+ def create_update(self, rest_path, data=None):
+ return RESPONSE_PAYLOAD[0]
+
+ def get_by_path(self, path):
+ return RESPONSE_PAYLOAD[0]
+
+ def delete_by_path(self, path):
+ return {}
+
+ monkeypatch.setattr(SplunkRequest, "create_update", create_update)
+ monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
+ monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path)
+
+ self._plugin._task.args = {
+ "state": "replaced",
+ "config": [REQUEST_PAYLOAD[1]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_adaptive_response_notable_events_replaced_idempotent(
+ self, conn, monkeypatch
+ ):
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+
+ def create_update(self, rest_path, data=None):
+ return RESPONSE_PAYLOAD[0]
+
+ def get_by_path(self, path):
+ return RESPONSE_PAYLOAD[0]
+
+ monkeypatch.setattr(SplunkRequest, "create_update", create_update)
+ monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
+
+ self._plugin._task.args = {
+ "state": "replaced",
+ "config": [REQUEST_PAYLOAD[0]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+
+ assert result["changed"] is False
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_adaptive_response_notable_events_deleted(
+ self, conn, monkeypatch
+ ):
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+
+ self._plugin.search_for_resource_name = MagicMock()
+ self._plugin.search_for_resource_name.return_value = (
+ RESPONSE_PAYLOAD[0],
+ self.metadata,
+ )
+
+ def create_update(self, rest_path, data=None):
+ return RESPONSE_PAYLOAD[0]
+
+ monkeypatch.setattr(SplunkRequest, "create_update", create_update)
+
+ self._plugin._task.args = {
+ "state": "deleted",
+ "config": [
+ {
+ "correlation_search_name": "Ansible Test",
+ }
+ ],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+
+ assert result["changed"] is True
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_adaptive_response_notable_events_deleted_idempotent(
+ self, connection
+ ):
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+ self._plugin.search_for_resource_name = MagicMock()
+ self._plugin.search_for_resource_name.return_value = {}, {}
+
+ self._plugin._task.args = {
+ "state": "deleted",
+ "config": [
+ {
+ "correlation_search_name": "Ansible Test",
+ }
+ ],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_adaptive_response_notable_events_gathered(
+ self, conn, monkeypatch
+ ):
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+ self._plugin.search_for_resource_name = MagicMock()
+ self._plugin.search_for_resource_name.return_value = (
+ RESPONSE_PAYLOAD[0],
+ self.metadata,
+ )
+
+ self._plugin._task.args = {
+ "state": "gathered",
+ "config": [
+ {
+ "correlation_search_name": "Ansible Test",
+ }
+ ],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
diff --git a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_correlation_searches.py b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_correlation_searches.py
new file mode 100644
index 000000000..fca268c98
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_correlation_searches.py
@@ -0,0 +1,373 @@
+# Copyright (c) 2022 Red Hat
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+from ansible.module_utils.six import PY2
+
+builtin_import = "builtins.__import__"
+if PY2:
+ builtin_import = "__builtin__.__import__"
+
+import tempfile
+from ansible.playbook.task import Task
+from ansible.template import Templar
+from ansible_collections.splunk.es.plugins.action.splunk_correlation_searches import (
+ ActionModule,
+)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+)
+from ansible_collections.ansible.utils.tests.unit.compat.mock import (
+ MagicMock,
+ patch,
+)
+
# Canned Splunk REST API response for one correlation search, shaped like the
# "entry" payload the SplunkRequest helpers return; used as the stubbed REST
# result throughout the tests below.
RESPONSE_PAYLOAD = {
    "entry": [
        {
            "acl": {"app": "DA-ESS-EndpointProtection"},
            "content": {
                "action.correlationsearch.annotations": '{"cis20": ["test1"], "mitre_attack": ["test2"], "kill_chain_phases": ["test3"], '
                '"nist": ["test4"], "test_framework": ["test5"]}',
                "action.correlationsearch.enabled": "1",
                "action.correlationsearch.label": "Ansible Test",
                "alert.digest_mode": True,
                "alert.suppress": False,
                "alert.suppress.fields": "test_field1",
                "alert.suppress.period": "5s",
                "alert_comparator": "greater than",
                "alert_threshold": "10",
                "alert_type": "number of events",
                "cron_schedule": "*/5 * * * *",
                "description": "test description",
                "disabled": False,
                "dispatch.earliest_time": "-24h",
                "dispatch.latest_time": "now",
                "dispatch.rt_backfill": True,
                "is_scheduled": True,
                "realtime_schedule": True,
                "request.ui_dispatch_app": "SplunkEnterpriseSecuritySuite",
                "schedule_priority": "default",
                "schedule_window": "0",
                "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent'
                'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai'
                'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio'
                'n.src" as "src" | where "count">=6',
            },
            "name": "Ansible Test",
        }
    ]
}
+
# Two module-argument ("config") variants for a correlation search.  Entry 0
# mirrors RESPONSE_PAYLOAD above (used for merge/idempotency tests); entry 1
# differs in app, annotations, scheduling and throttling (used to drive the
# "replaced" tests).
REQUEST_PAYLOAD = [
    {
        "name": "Ansible Test",
        "disabled": False,
        "description": "test description",
        "app": "DA-ESS-EndpointProtection",
        "annotations": {
            "cis20": ["test1"],
            "mitre_attack": ["test2"],
            "kill_chain_phases": ["test3"],
            "nist": ["test4"],
            "custom": [
                {
                    "framework": "test_framework",
                    "custom_annotations": ["test5"],
                }
            ],
        },
        "ui_dispatch_context": "SplunkEnterpriseSecuritySuite",
        "time_earliest": "-24h",
        "time_latest": "now",
        "cron_schedule": "*/5 * * * *",
        "scheduling": "realtime",
        "schedule_window": "0",
        "schedule_priority": "default",
        "trigger_alert": "once",
        "trigger_alert_when": "number of events",
        "trigger_alert_when_condition": "greater than",
        "trigger_alert_when_value": "10",
        "throttle_window_duration": "5s",
        "throttle_fields_to_group_by": ["test_field1"],
        "suppress_alerts": False,
        "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent'
        'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai'
        'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio'
        'n.src" as "src" | where "count">=6',
    },
    {
        "name": "Ansible Test",
        "disabled": False,
        "description": "test description",
        "app": "SplunkEnterpriseSecuritySuite",
        "annotations": {
            "cis20": ["test1", "test2"],
            "mitre_attack": ["test3", "test4"],
            "kill_chain_phases": ["test5", "test6"],
            "nist": ["test7", "test8"],
            "custom": [
                {
                    "framework": "test_framework2",
                    "custom_annotations": ["test9", "test10"],
                }
            ],
        },
        "ui_dispatch_context": "SplunkEnterpriseSecuritySuite",
        "time_earliest": "-24h",
        "time_latest": "now",
        "cron_schedule": "*/5 * * * *",
        "scheduling": "continuous",
        "schedule_window": "auto",
        "schedule_priority": "default",
        "trigger_alert": "once",
        "trigger_alert_when": "number of events",
        "trigger_alert_when_condition": "greater than",
        "trigger_alert_when_value": "10",
        "throttle_window_duration": "5s",
        "throttle_fields_to_group_by": ["test_field1", "test_field2"],
        "suppress_alerts": True,
        "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent'
        'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai'
        'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio'
        'n.src" as "src" | where "count">=6',
    },
]
+
+
class TestSplunkEsCorrelationSearches:
    """Unit tests for the ``splunk_correlation_searches`` action plugin.

    Each test stubs the Splunk REST layer (``SplunkRequest``) and the
    persistent connection, then asserts only on the ``changed`` flag
    returned by ``ActionModule.run``.
    """

    # NOTE(review): pytest's nose-style ``setup`` hook is deprecated (removed
    # in pytest 8) -- consider renaming to ``setup_method``; confirm against
    # the pytest version pinned by CI.
    def setup(self):
        task = MagicMock(Task)
        # Ansible > 2.13 looks for check_mode in task
        task.check_mode = False
        play_context = MagicMock()
        # Ansible <= 2.13 looks for check_mode in play_context
        play_context.check_mode = False
        # NOTE(review): ``patch(...)`` returns an *unstarted* patcher object,
        # not a Connection mock; the plugin only needs something carrying a
        # socket-path attribute, but a MagicMock would state the intent better.
        connection = patch(
            "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection"
        )
        connection._socket_path = tempfile.NamedTemporaryFile().name
        fake_loader = {}
        templar = Templar(loader=fake_loader)
        self._plugin = ActionModule(
            task=task,
            connection=connection,
            play_context=play_context,
            loader=fake_loader,
            templar=templar,
            shared_loader_obj=None,
        )
        self._plugin._task.action = "correlation_searches"
        self._plugin._task.async_val = False
        self._task_vars = {}

    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_correlation_searches_merged(self, connection, monkeypatch):
        """Merging a search that is absent on the device reports a change."""
        self._plugin.api_response = RESPONSE_PAYLOAD
        self._plugin.search_for_resource_name = MagicMock()
        self._plugin.search_for_resource_name.return_value = {}

        def create_update(self, rest_path, data=None):
            return RESPONSE_PAYLOAD

        monkeypatch.setattr(SplunkRequest, "create_update", create_update)

        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()
        self._plugin._task.args = {
            "state": "merged",
            "config": [REQUEST_PAYLOAD[0]],
        }
        result = self._plugin.run(task_vars=self._task_vars)
        assert result["changed"] is True

    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_correlation_searches_merged_idempotent(
        self, conn, monkeypatch
    ):
        """Merging a config identical to the device state is a no-op."""
        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()

        def create_update(self, rest_path, data=None):
            return RESPONSE_PAYLOAD

        def get_by_path(self, path):
            return RESPONSE_PAYLOAD

        monkeypatch.setattr(SplunkRequest, "create_update", create_update)
        monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)

        self._plugin._task.args = {
            "state": "merged",
            "config": [REQUEST_PAYLOAD[0]],
        }
        result = self._plugin.run(task_vars=self._task_vars)
        assert result["changed"] is False

    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_correlation_searches_replaced_01(self, conn, monkeypatch):
        """Replacing an existing search with a different config changes it."""
        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()
        self._plugin.search_for_resource_name = MagicMock()
        self._plugin.search_for_resource_name.return_value = RESPONSE_PAYLOAD

        def create_update(self, rest_path, data=None):
            return RESPONSE_PAYLOAD

        def get_by_path(self, path):
            return RESPONSE_PAYLOAD

        def delete_by_path(self, path):
            return {}

        monkeypatch.setattr(SplunkRequest, "create_update", create_update)
        monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
        monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path)

        self._plugin._task.args = {
            "state": "replaced",
            "config": [REQUEST_PAYLOAD[1]],
        }
        result = self._plugin.run(task_vars=self._task_vars)
        assert result["changed"] is True

    # NOTE(review): this test is byte-identical to replaced_01 (same stubs,
    # same REQUEST_PAYLOAD[1]); it was probably meant to exercise a different
    # config variant -- confirm intent.
    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_correlation_searches_replaced_02(self, conn, monkeypatch):
        """Second ``replaced`` scenario; currently duplicates replaced_01."""
        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()
        self._plugin.search_for_resource_name = MagicMock()
        self._plugin.search_for_resource_name.return_value = RESPONSE_PAYLOAD

        def create_update(self, rest_path, data=None):
            return RESPONSE_PAYLOAD

        def get_by_path(self, path):
            return RESPONSE_PAYLOAD

        def delete_by_path(self, path):
            return {}

        monkeypatch.setattr(SplunkRequest, "create_update", create_update)
        monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
        monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path)

        self._plugin._task.args = {
            "state": "replaced",
            "config": [REQUEST_PAYLOAD[1]],
        }
        result = self._plugin.run(task_vars=self._task_vars)
        assert result["changed"] is True

    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_correlation_searches_replaced_idempotent(
        self, conn, monkeypatch
    ):
        """Replacing with a config matching the device state is a no-op."""
        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()

        def create_update(self, rest_path, data=None):
            return RESPONSE_PAYLOAD

        def get_by_path(self, path):
            return RESPONSE_PAYLOAD

        def delete_by_path(self, path):
            return {}

        monkeypatch.setattr(SplunkRequest, "create_update", create_update)
        monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
        monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path)

        self._plugin._task.args = {
            "state": "replaced",
            "config": [REQUEST_PAYLOAD[0]],
        }
        result = self._plugin.run(task_vars=self._task_vars)

        assert result["changed"] is False

    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_correlation_searches_deleted(self, conn, monkeypatch):
        """Deleting an existing search reports a change."""
        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()

        def get_by_path(self, path):
            return RESPONSE_PAYLOAD

        def delete_by_path(self, path):
            return {}

        monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
        monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path)

        self._plugin._task.args = {
            "state": "deleted",
            "config": [{"name": "Ansible Test"}],
        }
        result = self._plugin.run(task_vars=self._task_vars)
        assert result["changed"] is True

    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_correlation_searches_deleted_idempotent(self, connection):
        """Deleting a search that does not exist is a no-op."""
        self._plugin.search_for_resource_name = MagicMock()
        self._plugin.search_for_resource_name.return_value = {}

        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()
        self._plugin._task.args = {
            "state": "deleted",
            "config": [{"name": "Ansible Test"}],
        }
        result = self._plugin.run(task_vars=self._task_vars)
        assert result["changed"] is False

    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_correlation_searches_gathered(self, conn, monkeypatch):
        """Gathering state never reports a change."""
        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()

        def get_by_path(self, path):
            return RESPONSE_PAYLOAD

        monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)

        self._plugin._task.args = {
            "state": "gathered",
            "config": [{"name": "Ansible Test"}],
        }
        result = self._plugin.run(task_vars=self._task_vars)
        assert result["changed"] is False
diff --git a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_monitors.py b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_monitors.py
new file mode 100644
index 000000000..068fe638d
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_monitors.py
@@ -0,0 +1,357 @@
+# Copyright (c) 2022 Red Hat
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+from ansible.module_utils.six import PY2
+
+builtin_import = "builtins.__import__"
+if PY2:
+ builtin_import = "__builtin__.__import__"
+
+import tempfile
+from ansible.playbook.task import Task
+from ansible.template import Templar
+from ansible_collections.splunk.es.plugins.action.splunk_data_inputs_monitor import (
+ ActionModule,
+)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+)
+from ansible_collections.ansible.utils.tests.unit.compat.mock import (
+ MagicMock,
+ patch,
+)
+
# Canned Splunk REST API response for one file-monitor data input
# (name "/var/log"); used as the stubbed REST result in the tests below.
RESPONSE_PAYLOAD = {
    "entry": [
        {
            "content": {
                "_rcvbuf": 1572864,
                "blacklist": "//var/log/[a-z]/gm",
                "check-index": None,
                "crcSalt": "<SOURCE>",
                "disabled": False,
                "eai:acl": None,
                "filecount": 74,
                "filestatecount": 82,
                "followTail": False,
                "host": "$decideOnStartup",
                "host_regex": "/(test_host)/gm",
                "host_resolved": "ip-172-31-52-131.us-west-2.compute.internal",
                "host_segment": 3,
                "ignoreOlderThan": "5d",
                "index": "default",
                "recursive": True,
                "source": "test",
                "sourcetype": "test_source_type",
                "time_before_close": 4,
                "whitelist": "//var/log/[0-9]/gm",
            },
            "name": "/var/log",
        }
    ]
}
+
# Module-argument ("config") variants for the monitor input.  Entry 0 mirrors
# RESPONSE_PAYLOAD above; entry 1 differs (blacklist, no host_regex/segment)
# and drives the "replaced" test.
REQUEST_PAYLOAD = [
    {
        "blacklist": "//var/log/[a-z]/gm",
        "crc_salt": "<SOURCE>",
        "disabled": False,
        "follow_tail": False,
        "host": "$decideOnStartup",
        "host_regex": "/(test_host)/gm",
        "host_segment": 3,
        "index": "default",
        "name": "/var/log",
        "recursive": True,
        "sourcetype": "test_source_type",
        "whitelist": "//var/log/[0-9]/gm",
    },
    {
        "blacklist": "//var/log/[a-z0-9]/gm",
        "crc_salt": "<SOURCE>",
        "disabled": False,
        "follow_tail": False,
        "host": "$decideOnStartup",
        "index": "default",
        "name": "/var/log",
        "recursive": True,
    },
]
+
+
class TestSplunkEsDataInputsMonitorRules:
    """Unit tests for the ``splunk_data_inputs_monitor`` action plugin.

    Each test stubs the Splunk REST layer (``SplunkRequest``) and asserts
    only on the ``changed`` flag returned by ``ActionModule.run``.
    """

    # NOTE(review): pytest's nose-style ``setup`` hook is deprecated (removed
    # in pytest 8) -- consider renaming to ``setup_method``.
    def setup(self):
        task = MagicMock(Task)
        # Ansible > 2.13 looks for check_mode in task
        task.check_mode = False
        play_context = MagicMock()
        # Ansible <= 2.13 looks for check_mode in play_context
        play_context.check_mode = False
        # NOTE(review): ``patch(...)`` returns an *unstarted* patcher, not a
        # Connection mock; it merely carries the socket-path attribute.
        connection = patch(
            "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection"
        )
        connection._socket_path = tempfile.NamedTemporaryFile().name
        fake_loader = {}
        templar = Templar(loader=fake_loader)
        self._plugin = ActionModule(
            task=task,
            connection=connection,
            play_context=play_context,
            loader=fake_loader,
            templar=templar,
            shared_loader_obj=None,
        )
        self._plugin._task.action = "data_inputs_monitor"
        self._plugin._task.async_val = False
        self._task_vars = {}

    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_data_inputs_monitor_merged(self, connection, monkeypatch):
        """Merging a monitor input absent on the device reports a change."""
        self._plugin.api_response = RESPONSE_PAYLOAD
        self._plugin.search_for_resource_name = MagicMock()
        self._plugin.search_for_resource_name.return_value = {}

        def create_update(
            self, rest_path, data=None, mock=None, mock_data=None
        ):
            return RESPONSE_PAYLOAD

        monkeypatch.setattr(SplunkRequest, "create_update", create_update)

        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()
        self._plugin._task.args = {
            "state": "merged",
            "config": [REQUEST_PAYLOAD[0]],
        }
        result = self._plugin.run(task_vars=self._task_vars)
        assert result["changed"] is True

    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_data_inputs_monitor_merged_idempotent(self, conn, monkeypatch):
        """Merging a config identical to the device state is a no-op."""
        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()

        def create_update(
            self, rest_path, data=None, mock=None, mock_data=None
        ):
            return RESPONSE_PAYLOAD

        def get_by_path(self, path):
            return RESPONSE_PAYLOAD

        monkeypatch.setattr(SplunkRequest, "create_update", create_update)
        monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)

        self._plugin._task.args = {
            "state": "merged",
            "config": [
                {
                    "blacklist": "//var/log/[a-z]/gm",
                    "crc_salt": "<SOURCE>",
                    "disabled": False,
                    "follow_tail": False,
                    "host": "$decideOnStartup",
                    "host_regex": "/(test_host)/gm",
                    "host_segment": 3,
                    "index": "default",
                    "name": "/var/log",
                    "recursive": True,
                    "sourcetype": "test_source_type",
                    "whitelist": "//var/log/[0-9]/gm",
                }
            ],
        }
        result = self._plugin.run(task_vars=self._task_vars)
        assert result["changed"] is False

    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_data_inputs_monitor_replaced(self, conn, monkeypatch):
        """Replacing an existing input with a different config changes it."""
        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()
        self._plugin.search_for_resource_name = MagicMock()
        self._plugin.search_for_resource_name.return_value = RESPONSE_PAYLOAD

        def create_update(
            self, rest_path, data=None, mock=None, mock_data=None
        ):
            return RESPONSE_PAYLOAD

        def get_by_path(self, path):
            return RESPONSE_PAYLOAD

        monkeypatch.setattr(SplunkRequest, "create_update", create_update)
        monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)

        self._plugin._task.args = {
            "state": "replaced",
            "config": [
                {
                    "blacklist": "//var/log/[a-z0-9]/gm",
                    "crc_salt": "<SOURCE>",
                    "disabled": False,
                    "follow_tail": False,
                    "host": "$decideOnStartup",
                    "index": "default",
                    "name": "/var/log",
                    "recursive": True,
                }
            ],
        }
        result = self._plugin.run(task_vars=self._task_vars)
        assert result["changed"] is True

    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_data_inputs_monitor_replaced_idempotent(
        self, conn, monkeypatch
    ):
        """Replacing with a config matching the device state is a no-op."""
        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()

        def create_update(
            self, rest_path, data=None, mock=None, mock_data=None
        ):
            return RESPONSE_PAYLOAD

        # Inline copy of RESPONSE_PAYLOAD so this test cannot be affected by
        # mutation of the module-level fixture.
        def get_by_path(self, path):
            return {
                "entry": [
                    {
                        "content": {
                            "_rcvbuf": 1572864,
                            "blacklist": "//var/log/[a-z]/gm",
                            "check-index": None,
                            "crcSalt": "<SOURCE>",
                            "disabled": False,
                            "eai:acl": None,
                            "filecount": 74,
                            "filestatecount": 82,
                            "followTail": False,
                            "host": "$decideOnStartup",
                            "host_regex": "/(test_host)/gm",
                            "host_resolved": "ip-172-31-52-131.us-west-2.compute.internal",
                            "host_segment": 3,
                            "ignoreOlderThan": "5d",
                            "index": "default",
                            "recursive": True,
                            "source": "test",
                            "sourcetype": "test_source_type",
                            "time_before_close": 4,
                            "whitelist": "//var/log/[0-9]/gm",
                        },
                        "name": "/var/log",
                    }
                ]
            }

        monkeypatch.setattr(SplunkRequest, "create_update", create_update)
        monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)

        self._plugin._task.args = {
            "state": "replaced",
            "config": [
                {
                    "blacklist": "//var/log/[a-z]/gm",
                    "crc_salt": "<SOURCE>",
                    "disabled": False,
                    "follow_tail": False,
                    "host": "$decideOnStartup",
                    "host_regex": "/(test_host)/gm",
                    "host_segment": 3,
                    "index": "default",
                    "name": "/var/log",
                    "recursive": True,
                    "sourcetype": "test_source_type",
                    "whitelist": "//var/log/[0-9]/gm",
                }
            ],
        }
        result = self._plugin.run(task_vars=self._task_vars)
        assert result["changed"] is False

    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_data_inputs_monitor_deleted(self, conn, monkeypatch):
        """Deleting an existing monitor input reports a change."""
        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()

        def create_update(
            self, rest_path, data=None, mock=None, mock_data=None
        ):
            return RESPONSE_PAYLOAD

        def get_by_path(self, path):
            return RESPONSE_PAYLOAD

        monkeypatch.setattr(SplunkRequest, "create_update", create_update)
        monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)

        self._plugin._task.args = {
            "state": "deleted",
            "config": [{"name": "/var/log"}],
        }
        result = self._plugin.run(task_vars=self._task_vars)
        assert result["changed"] is True

    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_data_inputs_monitor_deleted_idempotent(self, connection):
        """Deleting an input that does not exist is a no-op."""
        self._plugin.search_for_resource_name = MagicMock()
        self._plugin.search_for_resource_name.return_value = {}

        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()
        self._plugin._task.args = {
            "state": "deleted",
            "config": [{"name": "/var/log"}],
        }
        result = self._plugin.run(task_vars=self._task_vars)
        assert result["changed"] is False

    @patch("ansible.module_utils.connection.Connection.__rpc__")
    def test_es_data_inputs_monitor_gathered(self, conn, monkeypatch):
        """Gathering state never reports a change."""
        self._plugin._connection.socket_path = (
            tempfile.NamedTemporaryFile().name
        )
        self._plugin._connection._shell = MagicMock()

        def get_by_path(self, path):
            return RESPONSE_PAYLOAD

        monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)

        self._plugin._task.args = {
            "state": "gathered",
            "config": [{"name": "/var/log"}],
        }
        result = self._plugin.run(task_vars=self._task_vars)
        assert result["changed"] is False
diff --git a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_network.py b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_network.py
new file mode 100644
index 000000000..dbadf9052
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_network.py
@@ -0,0 +1,711 @@
+# Copyright (c) 2022 Red Hat
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+from ansible.module_utils.six import PY2
+
+builtin_import = "builtins.__import__"
+if PY2:
+ builtin_import = "__builtin__.__import__"
+
+import tempfile
+from ansible.playbook.task import Task
+from ansible.template import Templar
+from ansible_collections.splunk.es.plugins.action.splunk_data_inputs_network import (
+ ActionModule,
+)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+)
+from ansible_collections.ansible.utils.tests.unit.compat.mock import (
+ MagicMock,
+ patch,
+)
+
# Canned Splunk REST API responses keyed by network input type
# (tcp_cooked / tcp_raw / udp / splunktcptoken / ssl); each value is the
# "entry" payload the stubbed REST calls return for that type.
RESPONSE_PAYLOAD = {
    "tcp_cooked": {
        "entry": [
            {
                "name": "default:8100",
                "content": {
                    "connection_host": "ip",
                    "disabled": False,
                    "host": "$decideOnStartup",
                    "restrictToHost": "default",
                },
            }
        ],
    },
    "tcp_raw": {
        "entry": [
            {
                "name": "default:8101",
                "content": {
                    "connection_host": "ip",
                    "disabled": True,
                    "host": "$decideOnStartup",
                    "index": "default",
                    "queue": "parsingQueue",
                    "rawTcpDoneTimeout": 9,
                    "restrictToHost": "default",
                    "source": "test_source",
                    "sourcetype": "test_source_type",
                },
            }
        ],
    },
    "udp": {
        "entry": [
            {
                "name": "default:7890",
                "content": {
                    "connection_host": "ip",
                    "disabled": True,
                    "host": "$decideOnStartup",
                    "index": "default",
                    "no_appending_timestamp": True,
                    "no_priority_stripping": True,
                    "queue": "parsingQueue",
                    "restrictToHost": "default",
                    "source": "test_source",
                    "sourcetype": "test_source_type",
                },
            }
        ],
    },
    "splunktcptoken": {
        "entry": [
            {
                "name": "splunktcptoken://test_token",
                "content": {
                    "token": "01234567-0123-0123-0123-012345678901",
                },
            }
        ],
    },
    "ssl": {
        "entry": [
            {
                "name": "test_host",
                "content": {},
            }
        ],
    },
}
+
# Module-argument ("config") payloads per network input type, matching
# RESPONSE_PAYLOAD above.  Note: "name" is a bare port number here; the
# replaced variants below use the fully qualified "host:port" form.
REQUEST_PAYLOAD = {
    "tcp_cooked": {
        "protocol": "tcp",
        "datatype": "cooked",
        "name": 8100,
        "connection_host": "ip",
        "disabled": False,
        "host": "$decideOnStartup",
        "restrict_to_host": "default",
    },
    "tcp_raw": {
        "protocol": "tcp",
        "datatype": "raw",
        "name": 8101,
        "connection_host": "ip",
        "disabled": True,
        "host": "$decideOnStartup",
        "index": "default",
        "queue": "parsingQueue",
        "raw_tcp_done_timeout": 9,
        "restrict_to_host": "default",
        "source": "test_source",
        "sourcetype": "test_source_type",
    },
    "udp": {
        "protocol": "udp",
        "name": 7890,
        "connection_host": "ip",
        "disabled": True,
        "host": "$decideOnStartup",
        "index": "default",
        "no_appending_timestamp": True,
        "no_priority_stripping": True,
        "queue": "parsingQueue",
        "restrict_to_host": "default",
        "source": "test_source",
        "sourcetype": "test_source_type",
    },
    "splunktcptoken": {
        "protocol": "tcp",
        "datatype": "splunktcptoken",
        "name": "test_token",
        "token": "01234567-0123-0123-0123-012345678901",
    },
    "ssl": {
        "protocol": "tcp",
        "datatype": "ssl",
        "name": "test_host",
    },
}
+
# REST responses after a "replaced" operation: same inputs as
# RESPONSE_PAYLOAD but with changed values (disabled flags, timeout, token,
# UDP stripping flags); no "ssl" entry.
REPLACED_RESPONSE_PAYLOAD = {
    "tcp_cooked": {
        "entry": [
            {
                "name": "default:8100",
                "content": {
                    "connection_host": "ip",
                    "disabled": True,
                    "host": "$decideOnStartup",
                    "restrictToHost": "default",
                },
            }
        ],
    },
    "tcp_raw": {
        "entry": [
            {
                "name": "default:8101",
                "content": {
                    "connection_host": "ip",
                    "disabled": True,
                    "host": "$decideOnStartup",
                    "index": "default",
                    "queue": "parsingQueue",
                    "rawTcpDoneTimeout": 10,
                    "restrictToHost": "default",
                    "source": "test_source",
                    "sourcetype": "test_source_type",
                },
            }
        ],
    },
    "udp": {
        "entry": [
            {
                "name": "default:7890",
                "content": {
                    "connection_host": "ip",
                    "disabled": True,
                    "host": "$decideOnStartup",
                    "index": "default",
                    "no_appending_timestamp": False,
                    "no_priority_stripping": False,
                    "queue": "parsingQueue",
                    "restrictToHost": "default",
                    "source": "test_source",
                    "sourcetype": "test_source_type",
                },
            }
        ],
    },
    "splunktcptoken": {
        "entry": [
            {
                "name": "splunktcptoken://test_token",
                "content": {
                    "token": "01234567-0123-0123-0123-012345678900",
                },
            }
        ],
    },
}
+
# Module-argument payloads that drive the "replaced" tests; these mirror
# REPLACED_RESPONSE_PAYLOAD and use fully qualified "host:port" names.
REPLACED_REQUEST_PAYLOAD = {
    "tcp_cooked": {
        "protocol": "tcp",
        "datatype": "cooked",
        "name": "default:8100",
        "connection_host": "ip",
        "disabled": True,
        "host": "$decideOnStartup",
        "restrict_to_host": "default",
    },
    "tcp_raw": {
        "protocol": "tcp",
        "datatype": "raw",
        "name": "default:8101",
        "connection_host": "ip",
        "disabled": True,
        "host": "$decideOnStartup",
        "index": "default",
        "queue": "parsingQueue",
        "raw_tcp_done_timeout": 10,
        "restrict_to_host": "default",
        "source": "test_source",
        "sourcetype": "test_source_type",
    },
    "udp": {
        "protocol": "udp",
        "name": "default:7890",
        "connection_host": "ip",
        "disabled": True,
        "host": "$decideOnStartup",
        "index": "default",
        "no_appending_timestamp": False,
        "no_priority_stripping": False,
        "queue": "parsingQueue",
        "restrict_to_host": "default",
        "source": "test_source",
        "sourcetype": "test_source_type",
    },
    "splunktcptoken": {
        "protocol": "tcp",
        "datatype": "splunktcptoken",
        "name": "splunktcptoken://test_token",
        "token": "01234567-0123-0123-0123-012345678900",
    },
}
+
+
+class TestSplunkEsDataInputsNetworksRules:
    # NOTE(review): pytest's nose-style ``setup`` hook is deprecated (removed
    # in pytest 8) -- consider renaming to ``setup_method``.
    def setup(self):
        """Create the data_inputs_network ActionModule with mocked plumbing."""
        task = MagicMock(Task)
        # Ansible > 2.13 looks for check_mode in task
        task.check_mode = False
        play_context = MagicMock()
        # Ansible <= 2.13 looks for check_mode in play_context
        play_context.check_mode = False
        # NOTE(review): ``patch(...)`` returns an *unstarted* patcher, not a
        # Connection mock; it merely carries the socket-path attribute.
        connection = patch(
            "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection"
        )
        connection._socket_path = tempfile.NamedTemporaryFile().name
        fake_loader = {}
        templar = Templar(loader=fake_loader)
        self._plugin = ActionModule(
            task=task,
            connection=connection,
            play_context=play_context,
            loader=fake_loader,
            templar=templar,
            shared_loader_obj=None,
        )
        self._plugin._task.action = "data_inputs_network"
        self._plugin._task.async_val = False
        self._task_vars = {}
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_data_inputs_network_merged(self, connection, monkeypatch):
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+
+ # patch update operation
+ update_response = RESPONSE_PAYLOAD["tcp_cooked"]
+
+ def get_by_path(self, path):
+ return {}
+
+ def create_update(
+ self, rest_path, data=None, mock=None, mock_data=None
+ ):
+ return update_response
+
+ monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
+ monkeypatch.setattr(SplunkRequest, "create_update", create_update)
+
+ # tcp_cooked
+ update_response = RESPONSE_PAYLOAD["tcp_cooked"]
+ self._plugin._task.args = {
+ "state": "merged",
+ "config": [REQUEST_PAYLOAD["tcp_cooked"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ # tcp_raw
+ update_response = RESPONSE_PAYLOAD["tcp_raw"]
+ self._plugin._task.args = {
+ "state": "merged",
+ "config": [REQUEST_PAYLOAD["tcp_raw"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ # udp
+ update_response = RESPONSE_PAYLOAD["udp"]
+ self._plugin._task.args = {
+ "state": "merged",
+ "config": [REQUEST_PAYLOAD["udp"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ # splunktcptoken
+ update_response = RESPONSE_PAYLOAD["splunktcptoken"]
+ self._plugin._task.args = {
+ "state": "merged",
+ "config": [REQUEST_PAYLOAD["splunktcptoken"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ # ssl
+ update_response = RESPONSE_PAYLOAD["ssl"]
+ self._plugin._task.args = {
+ "state": "merged",
+ "config": [REQUEST_PAYLOAD["ssl"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_data_inputs_network_merged_idempotent(self, conn, monkeypatch):
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+
+ # patch get operation
+ get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+
+ def get_by_path(self, path):
+ return get_response
+
+ monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
+
+ # tcp_cooked
+ get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+ self._plugin._task.args = {
+ "state": "merged",
+ "config": [REQUEST_PAYLOAD["tcp_cooked"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ # tcp_raw
+ get_response = RESPONSE_PAYLOAD["tcp_raw"]
+ self._plugin._task.args = {
+ "state": "merged",
+ "config": [REQUEST_PAYLOAD["tcp_raw"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ # udp
+ get_response = RESPONSE_PAYLOAD["udp"]
+ self._plugin._task.args = {
+ "state": "merged",
+ "config": [REQUEST_PAYLOAD["udp"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ # splunktcptoken
+ get_response = RESPONSE_PAYLOAD["splunktcptoken"]
+ self._plugin._task.args = {
+ "state": "merged",
+ "config": [REQUEST_PAYLOAD["splunktcptoken"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ # ssl
+ get_response = RESPONSE_PAYLOAD["ssl"]
+ self._plugin._task.args = {
+ "state": "merged",
+ "config": [REQUEST_PAYLOAD["ssl"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_data_inputs_network_replaced(self, conn, monkeypatch):
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+
+ # patch get operation
+ get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+ # patch update operation
+ update_response = REPLACED_RESPONSE_PAYLOAD["tcp_cooked"]
+
+ get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+
+ def delete_by_path(
+ self, rest_path, data=None, mock=None, mock_data=None
+ ):
+ return {}
+
+ def create_update(
+ self, rest_path, data=None, mock=None, mock_data=None
+ ):
+ return update_response
+
+ def get_by_path(self, path):
+ return get_response
+
+ monkeypatch.setattr(SplunkRequest, "create_update", create_update)
+ monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
+ monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path)
+
+ # tcp_cooked
+ get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+ update_response = REPLACED_RESPONSE_PAYLOAD["tcp_cooked"]
+ self._plugin._task.args = {
+ "state": "replaced",
+ "config": [REPLACED_REQUEST_PAYLOAD["tcp_cooked"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ # tcp_raw
+ get_response = RESPONSE_PAYLOAD["tcp_raw"]
+ update_response = REPLACED_RESPONSE_PAYLOAD["tcp_raw"]
+ self._plugin._task.args = {
+ "state": "replaced",
+ "config": [REPLACED_REQUEST_PAYLOAD["tcp_raw"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ # udp
+ get_response = RESPONSE_PAYLOAD["udp"]
+ update_response = REPLACED_RESPONSE_PAYLOAD["udp"]
+ self._plugin._task.args = {
+ "state": "replaced",
+ "config": [REPLACED_REQUEST_PAYLOAD["udp"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ # splunktcptoken
+ get_response = RESPONSE_PAYLOAD["splunktcptoken"]
+ update_response = REPLACED_RESPONSE_PAYLOAD["splunktcptoken"]
+ self._plugin._task.args = {
+ "state": "replaced",
+ "config": [REPLACED_REQUEST_PAYLOAD["splunktcptoken"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_data_inputs_network_replaced_idempotent(
+ self, conn, monkeypatch
+ ):
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+
+ # patch get operation
+ get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+
+ def get_by_path(self, path):
+ return get_response
+
+ monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
+
+ # tcp_cooked
+ get_response = REPLACED_RESPONSE_PAYLOAD["tcp_cooked"]
+ self._plugin._task.args = {
+ "state": "replaced",
+ "config": [REPLACED_REQUEST_PAYLOAD["tcp_cooked"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ # tcp_raw
+ get_response = REPLACED_RESPONSE_PAYLOAD["tcp_raw"]
+ self._plugin._task.args = {
+ "state": "replaced",
+ "config": [REPLACED_REQUEST_PAYLOAD["tcp_raw"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ # udp
+ get_response = REPLACED_RESPONSE_PAYLOAD["udp"]
+ self._plugin._task.args = {
+ "state": "replaced",
+ "config": [REPLACED_REQUEST_PAYLOAD["udp"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ # splunktcptoken
+ get_response = REPLACED_RESPONSE_PAYLOAD["splunktcptoken"]
+ self._plugin._task.args = {
+ "state": "replaced",
+ "config": [REPLACED_REQUEST_PAYLOAD["splunktcptoken"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_data_inputs_network_deleted(self, conn, monkeypatch):
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+
+ def delete_by_path(
+ self, rest_path, data=None, mock=None, mock_data=None
+ ):
+ return {}
+
+ get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+
+ def get_by_path(self, path):
+ return get_response
+
+ monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path)
+ monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
+
+ # tcp_cooked
+ get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+ self._plugin._task.args = {
+ "state": "deleted",
+ "config": [REQUEST_PAYLOAD["tcp_cooked"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ # tcp_raw
+ get_response = RESPONSE_PAYLOAD["tcp_raw"]
+ self._plugin._task.args = {
+ "state": "deleted",
+ "config": [REQUEST_PAYLOAD["tcp_raw"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ # udp
+ get_response = RESPONSE_PAYLOAD["udp"]
+ self._plugin._task.args = {
+ "state": "deleted",
+ "config": [REQUEST_PAYLOAD["udp"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ # splunktcptoken
+ get_response = RESPONSE_PAYLOAD["splunktcptoken"]
+ self._plugin._task.args = {
+ "state": "deleted",
+ "config": [REQUEST_PAYLOAD["splunktcptoken"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is True
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_data_inputs_network_deleted_idempotent(
+ self, conn, monkeypatch
+ ):
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+
+ def get_by_path(self, path):
+ return {}
+
+ monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
+
+ # tcp_cooked
+ self._plugin._task.args = {
+ "state": "deleted",
+ "config": [REQUEST_PAYLOAD["tcp_cooked"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ # tcp_raw
+ self._plugin._task.args = {
+ "state": "deleted",
+ "config": [REQUEST_PAYLOAD["tcp_raw"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ # udp
+ self._plugin._task.args = {
+ "state": "deleted",
+ "config": [REQUEST_PAYLOAD["udp"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ # splunktcptoken
+ self._plugin._task.args = {
+ "state": "deleted",
+ "config": [REQUEST_PAYLOAD["splunktcptoken"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_data_inputs_network_gathered(self, conn, monkeypatch):
+ self._plugin._connection.socket_path = (
+ tempfile.NamedTemporaryFile().name
+ )
+ self._plugin._connection._shell = MagicMock()
+
+ # patch get operation
+ get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+
+ def get_by_path(self, path):
+ return get_response
+
+ monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
+
+ # tcp_cooked
+ get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+ self._plugin._task.args = {
+ "state": "gathered",
+ "config": [REQUEST_PAYLOAD["tcp_cooked"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ # tcp_raw
+ get_response = RESPONSE_PAYLOAD["tcp_raw"]
+ self._plugin._task.args = {
+ "state": "gathered",
+ "config": [REQUEST_PAYLOAD["tcp_raw"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ # udp
+ get_response = RESPONSE_PAYLOAD["udp"]
+ self._plugin._task.args = {
+ "state": "gathered",
+ "config": [REQUEST_PAYLOAD["udp"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ # splunktcptoken
+ get_response = RESPONSE_PAYLOAD["splunktcptoken"]
+ self._plugin._task.args = {
+ "state": "gathered",
+ "config": [REQUEST_PAYLOAD["splunktcptoken"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
+
+ # ssl
+ get_response = RESPONSE_PAYLOAD["ssl"]
+ self._plugin._task.args = {
+ "state": "gathered",
+ "config": [REQUEST_PAYLOAD["ssl"]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ assert result["changed"] is False
diff --git a/ansible_collections/splunk/es/tests/unit/plugins/modules/__init__.py b/ansible_collections/splunk/es/tests/unit/plugins/modules/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/plugins/modules/__init__.py
diff --git a/ansible_collections/splunk/es/tests/unit/plugins/modules/conftest.py b/ansible_collections/splunk/es/tests/unit/plugins/modules/conftest.py
new file mode 100644
index 000000000..e19a1e04c
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/plugins/modules/conftest.py
@@ -0,0 +1,40 @@
+# Copyright (c) 2017 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import json
+
+import pytest
+
+from ansible.module_utils.six import string_types
+from ansible.module_utils._text import to_bytes
+from ansible.module_utils.common._collections_compat import MutableMapping
+
+
+@pytest.fixture
+def patch_ansible_module(request, mocker):
+ if isinstance(request.param, string_types):
+ args = request.param
+ elif isinstance(request.param, MutableMapping):
+ if "ANSIBLE_MODULE_ARGS" not in request.param:
+ request.param = {"ANSIBLE_MODULE_ARGS": request.param}
+ if "_ansible_remote_tmp" not in request.param["ANSIBLE_MODULE_ARGS"]:
+ request.param["ANSIBLE_MODULE_ARGS"][
+ "_ansible_remote_tmp"
+ ] = "/tmp"
+ if (
+ "_ansible_keep_remote_files"
+ not in request.param["ANSIBLE_MODULE_ARGS"]
+ ):
+ request.param["ANSIBLE_MODULE_ARGS"][
+ "_ansible_keep_remote_files"
+ ] = False
+ args = json.dumps(request.param)
+ else:
+ raise Exception(
+ "Malformed data to the patch_ansible_module pytest fixture"
+ )
+
+ mocker.patch("ansible.module_utils.basic._ANSIBLE_ARGS", to_bytes(args))
diff --git a/ansible_collections/splunk/es/tests/unit/plugins/modules/utils.py b/ansible_collections/splunk/es/tests/unit/plugins/modules/utils.py
new file mode 100644
index 000000000..d55afc0b3
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/plugins/modules/utils.py
@@ -0,0 +1,51 @@
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+import json
+
+from ansible_collections.splunk.es.tests.unit.compat import unittest
+from ansible_collections.splunk.es.tests.unit.compat.mock import patch
+from ansible.module_utils import basic
+from ansible.module_utils._text import to_bytes
+
+
+def set_module_args(args):
+ if "_ansible_remote_tmp" not in args:
+ args["_ansible_remote_tmp"] = "/tmp"
+ if "_ansible_keep_remote_files" not in args:
+ args["_ansible_keep_remote_files"] = False
+
+ args = json.dumps({"ANSIBLE_MODULE_ARGS": args})
+ basic._ANSIBLE_ARGS = to_bytes(args)
+
+
+class AnsibleExitJson(Exception):
+ pass
+
+
+class AnsibleFailJson(Exception):
+ pass
+
+
+def exit_json(*args, **kwargs):
+ if "changed" not in kwargs:
+ kwargs["changed"] = False
+ raise AnsibleExitJson(kwargs)
+
+
+def fail_json(*args, **kwargs):
+ kwargs["failed"] = True
+ raise AnsibleFailJson(kwargs)
+
+
+class ModuleTestCase(unittest.TestCase):
+ def setUp(self):
+ self.mock_module = patch.multiple(
+ basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json
+ )
+ self.mock_module.start()
+ self.mock_sleep = patch("time.sleep")
+ self.mock_sleep.start()
+ set_module_args({})
+ self.addCleanup(self.mock_module.stop)
+ self.addCleanup(self.mock_sleep.stop)
diff --git a/ansible_collections/splunk/es/tests/unit/requirements.txt b/ansible_collections/splunk/es/tests/unit/requirements.txt
new file mode 100644
index 000000000..a9772bea1
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/unit/requirements.txt
@@ -0,0 +1,42 @@
+boto3
+placebo
+pycrypto
+passlib
+pypsrp
+python-memcached
+pytz
+pyvmomi
+redis
+requests
+setuptools > 0.6 # pytest-xdist installed via requirements does not work with very old setuptools (sanity_ok)
+unittest2 ; python_version < '2.7'
+importlib ; python_version < '2.7'
+netaddr
+ipaddress
+netapp-lib
+solidfire-sdk-python
+
+# requirements for F5 specific modules
+f5-sdk ; python_version >= '2.7'
+f5-icontrol-rest ; python_version >= '2.7'
+deepdiff
+
+# requirement for Fortinet specific modules
+pyFMG
+
+# requirement for aci_rest module
+xmljson
+
+# requirement for winrm connection plugin tests
+pexpect
+
+# requirement for the linode module
+linode-python # APIv3
+linode_api4 ; python_version > '2.6' # APIv4
+
+# requirement for the gitlab module
+python-gitlab
+httmock
+
+# requirement for kubevirt modules
+openshift ; python_version >= '2.7'