author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-18 05:52:22 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-18 05:52:22 +0000
commit     38b7c80217c4e72b1d8988eb1e60bb6e77334114 (patch)
tree       356e9fd3762877d07cde52d21e77070aeff7e789 /ansible_collections/splunk
parent     Adding upstream version 7.7.0+dfsg. (diff)
download   ansible-38b7c80217c4e72b1d8988eb1e60bb6e77334114.tar.xz
           ansible-38b7c80217c4e72b1d8988eb1e60bb6e77334114.zip
Adding upstream version 9.4.0+dfsg. (tag: upstream/9.4.0+dfsg)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'ansible_collections/splunk')
-rw-r--r--  ansible_collections/splunk/es/.ansible-lint | 5
-rw-r--r--  ansible_collections/splunk/es/.darglint | 7
-rw-r--r--  ansible_collections/splunk/es/.github/CODEOWNERS | 0
-rw-r--r--  ansible_collections/splunk/es/.github/dependabot.yml | 9
-rw-r--r--  ansible_collections/splunk/es/.github/release-drafter.yml | 3
-rw-r--r--  ansible_collections/splunk/es/.github/workflows/ack.yml | 15
-rw-r--r--  ansible_collections/splunk/es/.github/workflows/codecoverage.yml | 15
-rw-r--r--  ansible_collections/splunk/es/.github/workflows/lint.yml | 13
-rw-r--r--  ansible_collections/splunk/es/.github/workflows/push.yml | 27
-rw-r--r--  ansible_collections/splunk/es/.github/workflows/release.yml | 14
-rw-r--r--  ansible_collections/splunk/es/.github/workflows/tests.yml (renamed from ansible_collections/splunk/es/.github/workflows/test.yml) | 15
-rw-r--r--  ansible_collections/splunk/es/.gitignore | 3
-rw-r--r--  ansible_collections/splunk/es/.isort.cfg | 6
-rw-r--r--  ansible_collections/splunk/es/.pre-commit-config.yaml | 39
-rw-r--r--  ansible_collections/splunk/es/.prettierignore | 22
-rw-r--r--  ansible_collections/splunk/es/CHANGELOG.rst | 34
-rw-r--r--  ansible_collections/splunk/es/FILES.json | 920
-rw-r--r--  ansible_collections/splunk/es/MANIFEST.json | 4
-rw-r--r--  ansible_collections/splunk/es/README.md | 23
-rw-r--r--  ansible_collections/splunk/es/bindep.txt | 2
-rw-r--r--  ansible_collections/splunk/es/changelogs/changelog.yaml | 111
-rw-r--r--  ansible_collections/splunk/es/changelogs/config.yaml | 34
-rw-r--r--  ansible_collections/splunk/es/codecov.yml | 15
-rw-r--r--  ansible_collections/splunk/es/cspell.config.yaml | 37
-rw-r--r--  ansible_collections/splunk/es/docs/splunk.es.adaptive_response_notable_event_module.rst | 2
-rw-r--r--  ansible_collections/splunk/es/docs/splunk.es.data_input_monitor_module.rst | 10
-rw-r--r--  ansible_collections/splunk/es/docs/splunk.es.data_input_network_module.rst | 2
-rw-r--r--  ansible_collections/splunk/es/docs/splunk.es.splunk_adaptive_response_notable_events_module.rst | 40
-rw-r--r--  ansible_collections/splunk/es/docs/splunk.es.splunk_correlation_searches_module.rst | 4
-rw-r--r--  ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_monitor_module.rst | 10
-rw-r--r--  ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_network_module.rst | 10
-rw-r--r--  ansible_collections/splunk/es/plugins/action/splunk_adaptive_response_notable_events.py | 96
-rw-r--r--  ansible_collections/splunk/es/plugins/action/splunk_correlation_searches.py | 83
-rw-r--r--  ansible_collections/splunk/es/plugins/action/splunk_data_inputs_monitor.py | 68
-rw-r--r--  ansible_collections/splunk/es/plugins/action/splunk_data_inputs_network.py | 74
-rw-r--r--  ansible_collections/splunk/es/plugins/httpapi/splunk.py | 16
-rw-r--r--  ansible_collections/splunk/es/plugins/module_utils/splunk.py | 41
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py | 153
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/correlation_search.py | 117
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/correlation_search_info.py | 15
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/data_input_monitor.py | 101
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/data_input_network.py | 61
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py | 153
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py | 41
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py | 117
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py | 15
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py | 12
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py | 101
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py | 61
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py | 16
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py | 18
-rw-r--r--  ansible_collections/splunk/es/pyproject.toml | 10
-rw-r--r--  ansible_collections/splunk/es/test-requirements.txt | 13
-rw-r--r--  ansible_collections/splunk/es/tests/config.yml | 3
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/tasks/main.yml | 40
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/tasks/main.yml | 52
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/tasks/main.yml | 38
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/data_input_network/tasks/main.yml | 28
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml | 2
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml | 17
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml | 4
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml | 8
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml | 74
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/vars/main.yaml | 21
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml | 4
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml | 2
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/merged.yaml | 5
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml | 8
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml | 4
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml | 2
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml | 17
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml | 4
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml | 8
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml | 12
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_remove_dim_config.yaml | 2
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml | 5
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml | 16
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml | 11
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml | 11
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml | 47
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml | 2
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml | 17
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml | 4
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml | 8
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml | 12
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml | 2
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml | 10
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml | 10
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml | 20
-rw-r--r--  ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/vars/main.yaml | 36
-rw-r--r--  ansible_collections/splunk/es/tests/sanity/ignore-2.12.txt | 1
-rw-r--r--  ansible_collections/splunk/es/tests/sanity/ignore-2.13.txt | 0
-rw-r--r--  ansible_collections/splunk/es/tests/sanity/ignore-2.14.txt | 0
-rw-r--r--  ansible_collections/splunk/es/tests/sanity/ignore-2.15.txt | 0
-rw-r--r--  ansible_collections/splunk/es/tests/sanity/ignore-2.16.txt | 0
-rw-r--r--  ansible_collections/splunk/es/tests/sanity/ignore-2.17.txt | 0
-rw-r--r--  ansible_collections/splunk/es/tests/sanity/ignore-2.9.txt | 2
-rw-r--r--  ansible_collections/splunk/es/tests/unit/compat/builtins.py | 34
-rw-r--r--  ansible_collections/splunk/es/tests/unit/compat/mock.py | 4
-rw-r--r--  ansible_collections/splunk/es/tests/unit/compat/unittest.py | 2
-rw-r--r--  ansible_collections/splunk/es/tests/unit/mock/loader.py | 5
-rw-r--r--  ansible_collections/splunk/es/tests/unit/mock/path.py | 9
-rw-r--r--  ansible_collections/splunk/es/tests/unit/mock/procenv.py | 9
-rw-r--r--  ansible_collections/splunk/es/tests/unit/mock/vault_helper.py | 6
-rw-r--r--  ansible_collections/splunk/es/tests/unit/mock/yaml_helper.py | 38
-rw-r--r--  ansible_collections/splunk/es/tests/unit/modules/conftest.py | 18
-rw-r--r--  ansible_collections/splunk/es/tests/unit/modules/utils.py | 10
-rw-r--r--  ansible_collections/splunk/es/tests/unit/plugins/action/test_es_adaptive_response_notable_events.py | 155
-rw-r--r--  ansible_collections/splunk/es/tests/unit/plugins/action/test_es_correlation_searches.py | 75
-rw-r--r--  ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_monitors.py | 100
-rw-r--r--  ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_network.py | 198
-rw-r--r--  ansible_collections/splunk/es/tests/unit/plugins/modules/conftest.py | 18
-rw-r--r--  ansible_collections/splunk/es/tests/unit/plugins/modules/utils.py | 10
-rw-r--r--  ansible_collections/splunk/es/tox.ini | 8
114 files changed, 2205 insertions(+), 1896 deletions(-)
diff --git a/ansible_collections/splunk/es/.ansible-lint b/ansible_collections/splunk/es/.ansible-lint
new file mode 100644
index 000000000..8d9bb70b8
--- /dev/null
+++ b/ansible_collections/splunk/es/.ansible-lint
@@ -0,0 +1,5 @@
+---
+profile: production
+
+exclude_paths:
+ - changelogs/changelog.yaml
diff --git a/ansible_collections/splunk/es/.darglint b/ansible_collections/splunk/es/.darglint
new file mode 100644
index 000000000..8e68aa3ec
--- /dev/null
+++ b/ansible_collections/splunk/es/.darglint
@@ -0,0 +1,7 @@
+[darglint]
+# NOTE: All `darglint` styles except for `sphinx` hit ridiculously low
+# NOTE: performance on some of the in-project Python modules.
+# Refs:
+# * https://github.com/terrencepreilly/darglint/issues/186
+docstring_style = sphinx
+strictness = full
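
As an aside on what this configuration enforces: darglint checks that a Sphinx-style docstring agrees with the actual function signature, and `strictness = full` requires a long-form docstring to cover every parameter and return value. A minimal sketch of a docstring that passes under these settings (the function itself is hypothetical):

    def add_monitor(path: str, index: str) -> str:
        """Create a monitor data input.

        :param path: Filesystem path for Splunk to watch.
        :param index: Name of the index the events are written to.
        :returns: Name of the created data input.
        """
        return path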
diff --git a/ansible_collections/splunk/es/.github/CODEOWNERS b/ansible_collections/splunk/es/.github/CODEOWNERS
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/splunk/es/.github/CODEOWNERS
diff --git a/ansible_collections/splunk/es/.github/dependabot.yml b/ansible_collections/splunk/es/.github/dependabot.yml
new file mode 100644
index 000000000..5b32d4c10
--- /dev/null
+++ b/ansible_collections/splunk/es/.github/dependabot.yml
@@ -0,0 +1,9 @@
+---
+version: 2
+updates:
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: daily
+ labels:
+ - "skip-changelog"
diff --git a/ansible_collections/splunk/es/.github/release-drafter.yml b/ansible_collections/splunk/es/.github/release-drafter.yml
new file mode 100644
index 000000000..e3e5966e4
--- /dev/null
+++ b/ansible_collections/splunk/es/.github/release-drafter.yml
@@ -0,0 +1,3 @@
+---
+# see https://github.com/ansible-community/devtools
+_extends: ansible-community/devtools
diff --git a/ansible_collections/splunk/es/.github/workflows/ack.yml b/ansible_collections/splunk/es/.github/workflows/ack.yml
new file mode 100644
index 000000000..fda595dc5
--- /dev/null
+++ b/ansible_collections/splunk/es/.github/workflows/ack.yml
@@ -0,0 +1,15 @@
+---
+# See https://github.com/ansible-community/devtools/blob/main/.github/workflows/ack.yml
+name: ack
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+ cancel-in-progress: true
+
+on: # yamllint disable-line rule:truthy
+ pull_request_target:
+ types: [opened, labeled, unlabeled, synchronize]
+
+jobs:
+ ack:
+ uses: ansible/devtools/.github/workflows/ack.yml@main
diff --git a/ansible_collections/splunk/es/.github/workflows/codecoverage.yml b/ansible_collections/splunk/es/.github/workflows/codecoverage.yml
new file mode 100644
index 000000000..c2a7ad60d
--- /dev/null
+++ b/ansible_collections/splunk/es/.github/workflows/codecoverage.yml
@@ -0,0 +1,15 @@
+---
+name: code_coverage
+
+on: # yamllint disable-line rule:truthy
+ push:
+ pull_request:
+ branches: [ main ]
+
+jobs:
+ codecoverage:
+ uses: ansible-network/github_actions/.github/workflows/coverage_network_devices.yml@main
+ with:
+ collection_pre_install: >-
+ git+https://github.com/ansible-collections/ansible.utils.git
+ git+https://github.com/ansible-collections/ansible.netcommon.git
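
One detail worth noting in this workflow: `collection_pre_install` uses a YAML folded block scalar (`>-`), so the two git URLs reach the reusable workflow as a single space-joined string. A quick sketch confirming the folding behavior (assumes PyYAML is available):

    import yaml

    doc = yaml.safe_load(
        "collection_pre_install: >-\n"
        "  git+https://github.com/ansible-collections/ansible.utils.git\n"
        "  git+https://github.com/ansible-collections/ansible.netcommon.git\n"
    )
    # '>-' folds the internal line break into a space and strips the
    # trailing newline, yielding one string with the two URLs space-joined:
    print(doc["collection_pre_install"])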
diff --git a/ansible_collections/splunk/es/.github/workflows/lint.yml b/ansible_collections/splunk/es/.github/workflows/lint.yml
new file mode 100644
index 000000000..fbac38cbf
--- /dev/null
+++ b/ansible_collections/splunk/es/.github/workflows/lint.yml
@@ -0,0 +1,13 @@
+---
+name: ansible-lint
+on: # yamllint disable-line rule:truthy
+ pull_request:
+ branches: ["main"]
+jobs:
+ build:
+ name: Ansible Lint
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Run ansible-lint
+ uses: ansible/ansible-lint@main
diff --git a/ansible_collections/splunk/es/.github/workflows/push.yml b/ansible_collections/splunk/es/.github/workflows/push.yml
new file mode 100644
index 000000000..dabb4351a
--- /dev/null
+++ b/ansible_collections/splunk/es/.github/workflows/push.yml
@@ -0,0 +1,27 @@
+---
+# push workflow is shared and expected to perform actions after a merge happens
+# on a maintenance branch (default or release). For example updating the
+# draft release-notes.
+# based on great work from
+# https://github.com/T-Systems-MMS/ansible-collection-icinga-director
+name: push
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+ cancel-in-progress: true
+
+on: # yamllint disable-line rule:truthy
+ workflow_dispatch:
+
+env:
+ NAMESPACE: splunk
+ COLLECTION_NAME: es
+ ANSIBLE_COLLECTIONS_PATHS: ./
+
+jobs:
+ update_release_draft:
+ uses: ansible/devtools/.github/workflows/push_network.yml@main
+ with:
+ repo: ansible-collections/splunk.es
+ secrets:
+ BOT_PAT: ${{ secrets.BOT_PAT }}
diff --git a/ansible_collections/splunk/es/.github/workflows/release.yml b/ansible_collections/splunk/es/.github/workflows/release.yml
new file mode 100644
index 000000000..eb04259d1
--- /dev/null
+++ b/ansible_collections/splunk/es/.github/workflows/release.yml
@@ -0,0 +1,14 @@
+---
+name: release
+on: # yamllint disable-line rule:truthy
+ release:
+ types: [published]
+
+jobs:
+ release:
+ uses: ansible/devtools/.github/workflows/release_collection.yml@main
+ with:
+ environment: release
+ secrets:
+ ah_token: ${{ secrets.AH_TOKEN }}
+ ansible_galaxy_api_key: ${{ secrets.ANSIBLE_GALAXY_API_KEY }}
diff --git a/ansible_collections/splunk/es/.github/workflows/test.yml b/ansible_collections/splunk/es/.github/workflows/tests.yml
index e2a19e00a..27ea93fa7 100644
--- a/ansible_collections/splunk/es/.github/workflows/test.yml
+++ b/ansible_collections/splunk/es/.github/workflows/tests.yml
@@ -1,18 +1,22 @@
---
-name: Test collection
+name: CI
concurrency:
- group: ${{ github.head_ref }}
+ group: ${{ github.head_ref || github.run_id }}
cancel-in-progress: true
on: # yamllint disable-line rule:truthy
pull_request:
branches: [main]
workflow_dispatch:
+ schedule:
+ - cron: '0 0 * * *'
+
jobs:
changelog:
uses: ansible-network/github_actions/.github/workflows/changelog.yml@main
+ if: github.event_name == 'pull_request'
sanity:
uses: ansible-network/github_actions/.github/workflows/sanity.yml@main
unit-galaxy:
@@ -24,7 +28,7 @@ jobs:
git+https://github.com/ansible-collections/ansible.utils.git
git+https://github.com/ansible-collections/ansible.netcommon.git
all_green:
- if: ${{ always() }}
+ if: ${{ always() && (github.event_name != 'schedule') }}
needs:
- changelog
- sanity
@@ -33,9 +37,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- run: >-
- python -c "assert set([
+ python -c "assert 'failure' not in
+ set([
'${{ needs.changelog.result }}',
'${{ needs.sanity.result }}',
'${{ needs.unit-galaxy.result }}',
'${{ needs.unit-source.result }}'
- ]) == {'success'}" \ No newline at end of file
+ ])"
diff --git a/ansible_collections/splunk/es/.gitignore b/ansible_collections/splunk/es/.gitignore
index 53e44f6d7..6cffd9049 100644
--- a/ansible_collections/splunk/es/.gitignore
+++ b/ansible_collections/splunk/es/.gitignore
@@ -129,3 +129,6 @@ dmypy.json
.pyre/
tests/output/
+.vscode/
+
+changelogs/.plugin-cache.yaml
diff --git a/ansible_collections/splunk/es/.isort.cfg b/ansible_collections/splunk/es/.isort.cfg
new file mode 100644
index 000000000..0c1b8a8de
--- /dev/null
+++ b/ansible_collections/splunk/es/.isort.cfg
@@ -0,0 +1,6 @@
+[settings]
+known_first_party=ansible_collections.splunk.es
+line_length=100
+lines_after_imports=2
+lines_between_types=1
+profile=black
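
For illustration, a rough sketch of how an import block would be laid out under these settings: sections ordered stdlib, third-party, then the collection itself (via `known_first_party`), one blank line between plain and `from` imports inside a section (`lines_between_types=1`), and two blank lines after the block (`lines_after_imports=2`). The module names are purely illustrative:

    import json

    from typing import Any

    import requests

    from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest


    def main():  # two blank lines above, per lines_after_imports=2
        ...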
diff --git a/ansible_collections/splunk/es/.pre-commit-config.yaml b/ansible_collections/splunk/es/.pre-commit-config.yaml
index a4450aa3c..275086d8d 100644
--- a/ansible_collections/splunk/es/.pre-commit-config.yaml
+++ b/ansible_collections/splunk/es/.pre-commit-config.yaml
@@ -1,7 +1,13 @@
---
repos:
+ - repo: https://github.com/ansible-network/collection_prep
+ rev: 1.1.1
+ hooks:
+ # - id: autoversion # removed as being handled by GHA push and release drafter
+ - id: update-docs
+
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.2.0
+ rev: v4.5.0
hooks:
- id: check-merge-conflict
- id: check-symlinks
@@ -10,12 +16,31 @@ repos:
- id: no-commit-to-branch
args: [--branch, main]
- id: trailing-whitespace
+
+ - repo: https://github.com/asottile/add-trailing-comma
+ rev: v3.1.0
+ hooks:
+ - id: add-trailing-comma
+
+ - repo: https://github.com/pre-commit/mirrors-prettier
+ rev: "v3.1.0"
+ hooks:
+ - id: prettier
+ entry: env CI=1 bash -c "prettier --list-different . || ec=$? && prettier --loglevel=error --write . && exit $ec"
+ pass_filenames: false
+ args: []
+ additional_dependencies:
+ - prettier
+ - prettier-plugin-toml
+
+ - repo: https://github.com/PyCQA/isort
+ rev: 5.12.0
+ hooks:
+ - id: isort
+ name: Sort import statements using isort
+ args: ["--filter-files"]
+
- repo: https://github.com/psf/black
- rev: 22.3.0
+ rev: 23.11.0
hooks:
- id: black
- args: [-l, "79"]
- - repo: https://github.com/ansible-network/collection_prep
- rev: 1.0.0
- hooks:
- - id: update-docs
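
The new `prettier` hook's `entry` line is worth unpacking: it remembers the exit status of `prettier --list-different` (which fails when any file is mis-formatted), rewrites the files anyway, and exits with the remembered status so the hook run still fails while leaving corrected files behind. The same "fail but fix" logic, sketched in Python:

    import subprocess
    import sys

    # Remember whether anything was mis-formatted...
    ec = subprocess.run(["prettier", "--list-different", "."]).returncode
    # ...rewrite the files regardless...
    subprocess.run(["prettier", "--loglevel=error", "--write", "."])
    # ...and report the original status so the run is still marked failed.
    sys.exit(ec)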
diff --git a/ansible_collections/splunk/es/.prettierignore b/ansible_collections/splunk/es/.prettierignore
new file mode 100644
index 000000000..9f980a682
--- /dev/null
+++ b/ansible_collections/splunk/es/.prettierignore
@@ -0,0 +1,22 @@
+# Stuff we don't want prettier to ever look into
+.*/
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# A linked collection directory created by pytest-ansible-units
+
+collections/
+
+# Tracked but not manually edited
+
+# Tracked but manually formatted
+
+# WIP
+README.md
diff --git a/ansible_collections/splunk/es/CHANGELOG.rst b/ansible_collections/splunk/es/CHANGELOG.rst
index da4e628d2..cb8b14130 100644
--- a/ansible_collections/splunk/es/CHANGELOG.rst
+++ b/ansible_collections/splunk/es/CHANGELOG.rst
@@ -5,12 +5,42 @@ Splunk Enterprise Security Collection Release Notes
.. contents:: Topics
+v2.1.2
+======
+
+Bugfixes
+--------
+
+- Fixed argspec validation for plugins with empty task attributes when run with Ansible 2.9.
+
+v2.1.1
+======
+
+Release Summary
+---------------
+
+Releasing version 2.1.1, featuring various maintenance updates.
+
v2.1.0
======
Minor Changes
-------------
+- Added adaptive_response_notable_events resource module
+- Added correlation_searches resource module
+- Added data_inputs_monitors resource module
+- Added data_inputs_networks resource module
+
+New Modules
+-----------
+
+Ansible Collections
+~~~~~~~~~~~~~~~~~~~
+
+splunk.es.plugins.modules
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
- splunk_adaptive_response_notable_events - Manage Adaptive Responses notable events resource module
- splunk_correlation_searches - Splunk Enterprise Security Correlation searches resource module
- splunk_data_inputs_monitor - Splunk Data Inputs of type Monitor resource module
@@ -36,7 +66,7 @@ v1.0.2
Release Summary
---------------
-- Re-releasing the 1.0.2 with updated galaxy file
+Re-releasing 1.0.1 with updated galaxy file.
v1.0.1
======
@@ -44,7 +74,7 @@ v1.0.1
Release Summary
---------------
-- Releasing 1.0.1 with updated changelog.
+Releasing 1.0.1 with updated changelog.
v1.0.0
======
diff --git a/ansible_collections/splunk/es/FILES.json b/ansible_collections/splunk/es/FILES.json
index dee0ba2d3..744f60fb3 100644
--- a/ansible_collections/splunk/es/FILES.json
+++ b/ansible_collections/splunk/es/FILES.json
@@ -8,160 +8,195 @@
"format": 1
},
{
- "name": ".github",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "codecov.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4aa7e485dd4db6f8a55b046088c745def2b3145d9499ccda4e9a3336467dcea2",
"format": 1
},
{
- "name": ".github/workflows",
+ "name": "LICENSE",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986",
+ "format": 1
+ },
+ {
+ "name": ".yamllint",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "827ef9e031ecdcaf137be239d33ef93fcbbc3611cbb6b30b0e507d0e03373d0e",
+ "format": 1
+ },
+ {
+ "name": "requirements.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "format": 1
+ },
+ {
+ "name": "meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": ".github/workflows/test.yml",
+ "name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "abbe2f2b782e28e478a011667782bcd93a86c21f1554f5eaa772305af4d37640",
+ "chksum_sha256": "a5ff05bca6bd4c71c1077632fdc7010ef5bab7c015eb99dfdadf5de56e381bfd",
"format": 1
},
{
- "name": "changelogs",
+ "name": "README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "938dfaf404198185218a2b62d141f6ccf09c6a076408874cf4bc3c45f313c727",
+ "format": 1
+ },
+ {
+ "name": "plugins",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "changelogs/fragments",
+ "name": "plugins/httpapi",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "changelogs/fragments/.keep",
+ "name": "plugins/httpapi/splunk.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "5c17dd1ec114b8bf3d25f0a84e8ce5a8eeea675932543a04bab03daf55904ac6",
"format": 1
},
{
- "name": "changelogs/changelog.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ba2ed344c4e522ff07307c59bc83f28297363d0ed60e0a5ff6a5cba44c9a9f85",
+ "name": "plugins/modules",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "changelogs/config.yaml",
+ "name": "plugins/modules/splunk_data_input_monitor.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a6c37d81485636d11d3658c05ae604ddcee8a2520cf831763b765b511ae5e522",
+ "chksum_sha256": "216caef6ec6b6f846989430797677bd460a6d16601f3aa3906e1277d621e5033",
"format": 1
},
{
- "name": "docs",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/splunk_data_inputs_network.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a2bf818f3844456d9da4086f1d456bc6de59a25d8b7ca4baedddeaefc5df5669",
"format": 1
},
{
- "name": "docs/splunk.es.adaptive_response_notable_event_module.rst",
+ "name": "plugins/modules/adaptive_response_notable_event.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b5ba38fde8ea6535297b89f3d307c4e6a4947a8e141da20614c68d31968e613f",
+ "chksum_sha256": "acad8c15a49747f365604a37e16d580a0b0a12734dc3f2585f5d6d48e27516a1",
"format": 1
},
{
- "name": "docs/splunk.es.correlation_search_info_module.rst",
+ "name": "plugins/modules/correlation_search_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e374b7c71d8a2b47033ef37218c3f1e7669239f4ab03ae1cd24d8c39adfcee3c",
+ "chksum_sha256": "73bf3ec288a2df3c9e61eca079688f34b10b2da272ddb80242d84a5d2a45e745",
"format": 1
},
{
- "name": "docs/splunk.es.correlation_search_module.rst",
+ "name": "plugins/modules/correlation_search.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cc5f185336595c92d966668a1bf632162befa22b7b2875d180d4226d4e45d48d",
+ "chksum_sha256": "94c917aa12f21fc3a8cad7bc3d62712ef32eb636425d9ff05b70d39a0b2c5048",
"format": 1
},
{
- "name": "docs/splunk.es.data_input_monitor_module.rst",
+ "name": "plugins/modules/splunk_data_inputs_monitor.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "871ee47e0650ef2a8eb79477b490491170cef7d4da3def7465e686e28ccd86a9",
+ "chksum_sha256": "9561661b9bb7d3952d73304d867c352ed4454b45d719b720353c715648fc3572",
"format": 1
},
{
- "name": "docs/splunk.es.data_input_network_module.rst",
+ "name": "plugins/modules/splunk_adaptive_response_notable_event.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5ab32953ec3c92411bbf989578e5e3898a8fb77a7df9500e90883cdf6b2632a8",
+ "chksum_sha256": "acad8c15a49747f365604a37e16d580a0b0a12734dc3f2585f5d6d48e27516a1",
"format": 1
},
{
- "name": "docs/splunk.es.splunk_adaptive_response_notable_events_module.rst",
+ "name": "plugins/modules/data_input_monitor.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2b7867e09c61ef22dc25cbbc86cb4139afb879270fac22f26294111fd2d70773",
+ "chksum_sha256": "216caef6ec6b6f846989430797677bd460a6d16601f3aa3906e1277d621e5033",
"format": 1
},
{
- "name": "docs/splunk.es.splunk_correlation_searches_module.rst",
+ "name": "plugins/modules/splunk_adaptive_response_notable_events.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5651ffdca7a73dc9d8b19025d652e6c0c15b7a387d91cd3fc7ec3f6106fed7f9",
+ "chksum_sha256": "9fbed0f19c2fd182a796881d8a47b645e9ae6a7352ed560b8d235fd46207d77f",
"format": 1
},
{
- "name": "docs/splunk.es.splunk_data_inputs_monitor_module.rst",
+ "name": "plugins/modules/splunk_correlation_searches.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b10ed82159024825f4f8b4ad09437c675b60f176fd6fb0f7a61390656ca99e5f",
+ "chksum_sha256": "329a0d14d4bd9e448056916f738c674eccbf602f6ce9564ed2e53abbdad83824",
"format": 1
},
{
- "name": "docs/splunk.es.splunk_data_inputs_network_module.rst",
+ "name": "plugins/modules/splunk_correlation_search_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fd0549229fd8ab0e612c37b66c5c32d5b2783cde25bc7afacec96f275a184d14",
+ "chksum_sha256": "73bf3ec288a2df3c9e61eca079688f34b10b2da272ddb80242d84a5d2a45e745",
"format": 1
},
{
- "name": "docs/splunk.es.splunk_httpapi.rst",
+ "name": "plugins/modules/data_input_network.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b7b00b66d8d113d97580211a4984c28a84031a259ef8649a2fc13d24f7be2adc",
+ "chksum_sha256": "376aed40c8b10f1d0b15e74cdcae6c44293fb4c9a06223e8a007c74aec76e9e2",
"format": 1
},
{
- "name": "meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/splunk_correlation_search.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "94c917aa12f21fc3a8cad7bc3d62712ef32eb636425d9ff05b70d39a0b2c5048",
"format": 1
},
{
- "name": "meta/runtime.yml",
+ "name": "plugins/modules/splunk_data_input_network.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a5ff05bca6bd4c71c1077632fdc7010ef5bab7c015eb99dfdadf5de56e381bfd",
+ "chksum_sha256": "376aed40c8b10f1d0b15e74cdcae6c44293fb4c9a06223e8a007c74aec76e9e2",
"format": 1
},
{
- "name": "plugins",
+ "name": "plugins/module_utils",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
+ "name": "plugins/module_utils/splunk.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2687558259e88c70ba236692c2005426170ee14120d848d3d57c43635d79fbd2",
+ "format": 1
+ },
+ {
"name": "plugins/action",
"ftype": "dir",
"chksum_type": null,
@@ -169,388 +204,416 @@
"format": 1
},
{
- "name": "plugins/action/splunk_adaptive_response_notable_events.py",
+ "name": "plugins/action/splunk_data_inputs_network.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3ccad3cbf8935c826b189f915203d455e9db1076ae11c96bd44c716e7c3812e8",
+ "chksum_sha256": "6a8555b9793bb49d1e250f831d9e71c4bdfd9a09a8ec97c6687a83f7de5995f2",
"format": 1
},
{
- "name": "plugins/action/splunk_correlation_searches.py",
+ "name": "plugins/action/splunk_data_inputs_monitor.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9b50d96a7c6d982a8946939feee7061ed30508ae7fbb87f50eb2d7ad5a57bc8f",
+ "chksum_sha256": "b0496b2d27d2aca94f87d7aef8302d324ccf1e0d4d64465b021f5bf31f4f1b20",
"format": 1
},
{
- "name": "plugins/action/splunk_data_inputs_monitor.py",
+ "name": "plugins/action/splunk_adaptive_response_notable_events.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd9cc47a4f0a5acb068e836cc3e8df9787bfd642aa8a3772caae3254b2d0f5bf",
+ "chksum_sha256": "7edb7fa8d9e2123743cc8f8e20e94de9eb925a1011bf04d66d45f57b9525ba97",
"format": 1
},
{
- "name": "plugins/action/splunk_data_inputs_network.py",
+ "name": "plugins/action/splunk_correlation_searches.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be803b1d01356fa3ff6cf595f7259bdbd7bf3722945d49fd1c729bc7278bdead",
+ "chksum_sha256": "691c81fdc1e2c6f595807a0f7a94a2e057c92110aa211e1372fd2564c3aa0b92",
"format": 1
},
{
- "name": "plugins/httpapi",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": ".ansible-lint",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b47f72e159f93a5ff07ea2534752e0fa977b214e9ac05c667fa83ac13be4e50c",
"format": 1
},
{
- "name": "plugins/httpapi/splunk.py",
+ "name": "test-requirements.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "63a8b202d37153330a0b403a4c614072d370e0956778d2645e1767df20a92c62",
+ "chksum_sha256": "ef11b7e31c53ed88a2352faacda33806dda00f45d4e9cce540a40e6a47ccbe73",
"format": 1
},
{
- "name": "plugins/module_utils",
+ "name": "tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/splunk.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "01a916fef4e7984fd44a871a41ef042ffd29095fcdae8ed971ba39073069b344",
- "format": 1
- },
- {
- "name": "plugins/modules",
+ "name": "tests/sanity",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/splunk_adaptive_response_notable_event.py",
+ "name": "tests/sanity/ignore-2.9.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a25efb25ab077bd88519ea3426fb8a13515e16036a4073a7fba3b054d6effa56",
+ "chksum_sha256": "968eecbadf439555025acc97355cd970ac992bb88948224a5ec93b0e5149d36a",
"format": 1
},
{
- "name": "plugins/modules/splunk_correlation_search.py",
+ "name": "tests/sanity/ignore-2.13.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c07af58dae3541b805dede95a25d557085593dc29f897d711498aedd9f284812",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/splunk_correlation_search_info.py",
+ "name": "tests/sanity/ignore-2.11.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a1c321bb59558e65920d07b8b5f664e27efcced9fd7e01c45f9a11c43faf8cbe",
+ "chksum_sha256": "783614c021deecb018573244dc973a566def9cfd8265e17ab934a1ab16b6ff0a",
"format": 1
},
{
- "name": "plugins/modules/splunk_data_input_monitor.py",
+ "name": "tests/sanity/ignore-2.10.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6342870ecb8e2edc36d3c15496735de964b74308abdd3835350ff95512676edc",
+ "chksum_sha256": "783614c021deecb018573244dc973a566def9cfd8265e17ab934a1ab16b6ff0a",
"format": 1
},
{
- "name": "plugins/modules/splunk_data_input_network.py",
+ "name": "tests/sanity/ignore-2.14.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08dc398f64b71cd5d81bb1d7f82db25ed089b297f77c5fe0beb35b648d5c7310",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/adaptive_response_notable_event.py",
+ "name": "tests/sanity/ignore-2.17.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a25efb25ab077bd88519ea3426fb8a13515e16036a4073a7fba3b054d6effa56",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/correlation_search.py",
+ "name": "tests/sanity/ignore-2.16.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c07af58dae3541b805dede95a25d557085593dc29f897d711498aedd9f284812",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/correlation_search_info.py",
+ "name": "tests/sanity/ignore-2.12.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a1c321bb59558e65920d07b8b5f664e27efcced9fd7e01c45f9a11c43faf8cbe",
+ "chksum_sha256": "12ec031c8ec4844396e76b57382d04fcd0a40f2cecbb07dcf091afef035b5cb7",
"format": 1
},
{
- "name": "plugins/modules/data_input_monitor.py",
+ "name": "tests/sanity/ignore-2.15.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6342870ecb8e2edc36d3c15496735de964b74308abdd3835350ff95512676edc",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/data_input_network.py",
+ "name": "tests/config.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08dc398f64b71cd5d81bb1d7f82db25ed089b297f77c5fe0beb35b648d5c7310",
+ "chksum_sha256": "de5087316490411841c67aa3307cfdd3acaea09875c9b4dee6852bca7c120764",
"format": 1
},
{
- "name": "plugins/modules/splunk_adaptive_response_notable_events.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "edd1d1cf0096053a34137462bce7b3ece10a0cacb0a88846cf280c74aa1c963a",
+ "name": "tests/unit",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/splunk_correlation_searches.py",
+ "name": "tests/unit/requirements.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c6271e8b5b280de34ec96d54d664fd20fb2bd4ab9a7f44b641ef2712c094628c",
+ "chksum_sha256": "49ba996dc4735c3463e9af561344346dfae14bcc1a68096ce78364b377f0df1f",
"format": 1
},
{
- "name": "plugins/modules/splunk_data_inputs_monitor.py",
+ "name": "tests/unit/plugins",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/plugins/modules",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/plugins/modules/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aa782e642c0756b61a703318d60116cb4267a0a37296f5beffe6b275afbac668",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/splunk_data_inputs_network.py",
+ "name": "tests/unit/plugins/modules/conftest.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "988f21bf5004878470902c794a652c9c388893617e45859dd126b1cbdba8d70c",
+ "chksum_sha256": "e0ae70fa7c2a5e56d7f02a47c3602398cff60c8eb021772ac59a76df2a234048",
"format": 1
},
{
- "name": "tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/plugins/modules/utils.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b1225bd17ba108d2e1f0532b303d58e869af59775e9a4f98faacb2ff4c29491f",
"format": 1
},
{
- "name": "tests/integration",
+ "name": "tests/unit/plugins/action",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/plugins/action/test_es_correlation_searches.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3ab54848f56bb67a89d5ee8d1a2d113f12adb4214b9abe48cc449d396579d8a1",
"format": 1
},
{
- "name": "tests/integration/targets/adaptive_response_notable_event",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/plugins/action/test_es_data_inputs_monitors.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d04d9fa4e55d5ef9581310a5fc95fc6ef02cb44d7406a420c7ecf47c9932500e",
"format": 1
},
{
- "name": "tests/integration/targets/adaptive_response_notable_event/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/plugins/action/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/adaptive_response_notable_event/tasks/main.yml",
+ "name": "tests/unit/plugins/action/test_es_data_inputs_network.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "344cff5c902b2c539ab54ef475c026d955a7c71becfeef0123295715118e706b",
+ "chksum_sha256": "26298844b40eba0d8e63e87dae541a7deb25e2b0bfd3660df90c5365cd31d243",
"format": 1
},
{
- "name": "tests/integration/targets/adaptive_response_notable_event/aliases",
+ "name": "tests/unit/plugins/action/test_es_adaptive_response_notable_events.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1",
+ "chksum_sha256": "f4307b7d5065b8141b5ec63139da75c68c3d1d9073bf2e14b39ce2beb8d79718",
"format": 1
},
{
- "name": "tests/integration/targets/correlation_search_info",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/correlation_search_info/tasks",
+ "name": "tests/unit/mock",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/correlation_search_info/tasks/main.yml",
+ "name": "tests/unit/mock/vault_helper.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c6660a367fbbc59393ebba29b9e733a103bf37b58fa37a1e1520039e06b737e8",
+ "chksum_sha256": "55bd0e924dcc22d050612c944bff0ef745e51faac9260dce9b9b2018c4c8a661",
"format": 1
},
{
- "name": "tests/integration/targets/correlation_search_info/aliases",
+ "name": "tests/unit/mock/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/data_input_monitor",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/mock/procenv.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f1ca7faf56e1d96d240d1d2a01da3580e9e80ae08e623c28e17a244a312c9154",
"format": 1
},
{
- "name": "tests/integration/targets/data_input_monitor/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/mock/loader.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "07eb6a715b3bc2a0f03d6fbaa5428cb74796403e1d30348f0d6c88022cea3eed",
"format": 1
},
{
- "name": "tests/integration/targets/data_input_monitor/tasks/main.yml",
+ "name": "tests/unit/mock/yaml_helper.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3a7a296c1e614d16fb885bbb21cbf2f4e61e4543e583a2703ec79a679937527b",
+ "chksum_sha256": "134eef238c83a9611799871b743e49e9bfbcd8bdddf2cc6a7bf69fd1000345b3",
"format": 1
},
{
- "name": "tests/integration/targets/data_input_monitor/aliases",
+ "name": "tests/unit/mock/path.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1",
+ "chksum_sha256": "1d232228a36d23ac913a46339dbea7ecc992d71f020fde1d63dfa2d166e8c066",
"format": 1
},
{
- "name": "tests/integration/targets/data_input_network",
+ "name": "tests/unit/modules",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/data_input_network/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/data_input_network/tasks/main.yml",
+ "name": "tests/unit/modules/conftest.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a0a3ac618c8005edf3c8dbda3493d4f7244273866c8d7b32765a4c13f7b09513",
+ "chksum_sha256": "e0ae70fa7c2a5e56d7f02a47c3602398cff60c8eb021772ac59a76df2a234048",
"format": 1
},
{
- "name": "tests/integration/targets/data_input_network/aliases",
+ "name": "tests/unit/modules/utils.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1",
+ "chksum_sha256": "b1225bd17ba108d2e1f0532b303d58e869af59775e9a4f98faacb2ff4c29491f",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_event",
+ "name": "tests/unit/compat",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_event/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/compat/unittest.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "727203a3846be41893b78a4b77852a1658925e936fb19539551958a5d8e8fb81",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_event/tasks/main.yml",
+ "name": "tests/unit/compat/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_event/aliases",
+ "name": "tests/unit/compat/mock.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1",
+ "chksum_sha256": "b91b074a0bb9cfda8560f02aa3cefe0bfaae9b045f8386597bfe342f1e5a0717",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events",
+ "name": "tests/.keep",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "format": 1
+ },
+ {
+ "name": "tests/integration",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/defaults",
+ "name": "tests/integration/targets",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c8e82c716422654d049fd043bbf84d624ed532f96741e032f52f14c19e970d3e",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/meta",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/meta/main.yaml",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec4fa30fc4a7b9e002d1c7b3932286ace72ba36e4f532e2cc79f49d07e0794c3",
+ "chksum_sha256": "75d83a8aa1677129e967a13404e097759d4685abd50f922d149cb45ae112b00f",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tasks",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2698dfe1378767d9367e9e195fe41eb8023f50e08e51d9eba97df23f2d99e704",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "35ab149685e4e0c2057584a084ccd381f93c108021fe9bbb8013ea2619b5acba",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/meta/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "07767c5c9e3656ee8556479d504d1499cc2a7f1da14c54022acbcfdc655c8926",
+ "chksum_sha256": "ec4fa30fc4a7b9e002d1c7b3932286ace72ba36e4f532e2cc79f49d07e0794c3",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e7d7f58a1d24f52718c31cc560ba27eaf69da2df9e8b0d26516560b547d1d9da",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "56d91877ced6fa3159f0e3c2ead5bfea8def1503c933cbbbafeb755c6c0bedd7",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
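
An aside on the recurring checksum in this manifest: the value recorded for every zero-byte file (`__init__.py`, `.keep`, the empty `ignore-2.x.txt` lists) is simply the SHA-256 digest of empty input, which is easy to verify:

    import hashlib

    # SHA-256 over zero bytes -- the hash FILES.json stores for empty files:
    print(hashlib.sha256(b"").hexdigest())
    # e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855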
@@ -561,52 +624,52 @@
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cf632cbb514f70975d14fd05391cc480a392f951cd6a427700f40fe9b3fc41b5",
+ "chksum_sha256": "b4215b589209fb50665478cb33956d81ecdf85525726f8b8ec10d274055b2b53",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/_remove_dim_config.yaml",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "818a2113dae79493f6f35b99bb494aa2ffed0491a8e72529195d55dd4c40b649",
+ "chksum_sha256": "cd10837d721353aedf5eed0f4bd87630ec782a1205dac2f033ccea2bd6beb862",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/deleted.yaml",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b4215b589209fb50665478cb33956d81ecdf85525726f8b8ec10d274055b2b53",
+ "chksum_sha256": "b7bf08866ffeeb492422f01e625599ba6e56f7b8a1c93e415694a46c4e93dff2",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/gathered.yaml",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/rtt.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd10837d721353aedf5eed0f4bd87630ec782a1205dac2f033ccea2bd6beb862",
+ "chksum_sha256": "c0c83dd1f31499dcd5ff7236f9457bfa0cc614fa62c17a002f6d97970667d6dd",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/merged.yaml",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/_remove_dim_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "706cb1d57331d919acdbdf6124adedad8bb394d9377423510d483a60b3713fe5",
+ "chksum_sha256": "818a2113dae79493f6f35b99bb494aa2ffed0491a8e72529195d55dd4c40b649",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/replaced.yaml",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aac96a89d717e3c0e686c626deaf73be71f8a4731bd4304e328a1c485f56d242",
+ "chksum_sha256": "706cb1d57331d919acdbdf6124adedad8bb394d9377423510d483a60b3713fe5",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/rtt.yaml",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c0c83dd1f31499dcd5ff7236f9457bfa0cc614fa62c17a002f6d97970667d6dd",
+ "chksum_sha256": "aac96a89d717e3c0e686c626deaf73be71f8a4731bd4304e328a1c485f56d242",
"format": 1
},
{
@@ -620,203 +683,196 @@
"name": "tests/integration/targets/splunk_adaptive_response_notable_events/vars/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fe97f0642079f50d8d7b02ddd3e36d6e7b004a642b8215ffde24b0df2c07ed51",
+ "chksum_sha256": "0e57d702453e71b6a0a47027fa4da3a6a248e3469df3cb49dd3dbebc6aea77af",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_correlation_searches",
+ "name": "tests/integration/targets/data_input_monitor",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_correlation_searches/tests",
+ "name": "tests/integration/targets/data_input_monitor/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml",
+ "name": "tests/integration/targets/data_input_monitor/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f5fbf065a46cb46b48cc237274cdb1f6e004ec2885f44990a10289cc9fc8329d",
+ "chksum_sha256": "830652bdd1f9199af2ac8f8b2cd730cd35db6dfc61dfd800aa7415eb3c6c4d5a",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml",
+ "name": "tests/integration/targets/data_input_monitor/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "19fc16bcb198789fbe56f05f5b4b7bb194139b864a521958e5699b11c63e83e4",
+ "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_correlation_searches/tests/deleted.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a60dcc8eaab017ddcc9e55ef06804ea804499160cee75ca7e6dbe25c194fc48f",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_correlation_searches/tests/gathered.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6121b05fcafc9f47ead22cc8b5e212a1a821198fefdce786bbde842194d0ebea",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_correlation_searches/tests/merged.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "53fe8e8c6ba2cd4b7ef89d65a9b1f183ad8397fbd3f49260bd5608da896d788c",
+ "chksum_sha256": "75d83a8aa1677129e967a13404e097759d4685abd50f922d149cb45ae112b00f",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9f73da67af2ad998e89ed256e64ccbe1e6b96d1d75a0f0227f5f0ffd9edc2605",
+ "chksum_sha256": "2698dfe1378767d9367e9e195fe41eb8023f50e08e51d9eba97df23f2d99e704",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "86b85acedcb5e72aba8ba4005f07e46645f900100e459c27182f1e73341118c4",
- "format": 1
- },
- {
- "name": "tests/integration/targets/splunk_data_inputs_monitor",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "35ab149685e4e0c2057584a084ccd381f93c108021fe9bbb8013ea2619b5acba",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/defaults",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/meta/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c8e82c716422654d049fd043bbf84d624ed532f96741e032f52f14c19e970d3e",
+ "chksum_sha256": "ec4fa30fc4a7b9e002d1c7b3932286ace72ba36e4f532e2cc79f49d07e0794c3",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/meta",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/meta/main.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec4fa30fc4a7b9e002d1c7b3932286ace72ba36e4f532e2cc79f49d07e0794c3",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/tasks",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "07767c5c9e3656ee8556479d504d1499cc2a7f1da14c54022acbcfdc655c8926",
+ "chksum_sha256": "79f6ea017ee5dded02ec4b04123878e75901c7d126368f4ae0caa2ec0ecf239c",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e7d7f58a1d24f52718c31cc560ba27eaf69da2df9e8b0d26516560b547d1d9da",
+ "chksum_sha256": "b06bd768c434a96678643fc831fcf48d740ade9739e19b213702cfbc931c4386",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "56d91877ced6fa3159f0e3c2ead5bfea8def1503c933cbbbafeb755c6c0bedd7",
+ "chksum_sha256": "b6a758fbeb133eb7f298e9e605f037c1a1427ed9a6e80b2ca256bd7d11e8434a",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7b72143772c2812141447499965f8a7a6b799dc22ce015ad4f085846df08cd20",
+ "chksum_sha256": "fe45cf5a2cd829d01c26f1be102b827c16c23cc2284f3016a08eb59a81b48c37",
"format": 1
},
{
"name": "tests/integration/targets/splunk_data_inputs_monitor/tests/_remove_dim_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eaae57d96ac6a30535e9d7f14a95a8cdbbdb8810b6aa499f6374401f2c29cb50",
+ "chksum_sha256": "04c865a7b363702bd318318f2e5e70eb135dec988375aabc16e9a56015dcd656",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "190a32c828efdc02b4ea7786c6cf95a0b92519c13fb77bae3cc3eb9d8f8e30e7",
+ "chksum_sha256": "aa5d70ed09680b437130b6b55718b474415e97a0f075bd62c522db53f0eac574",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/gathered.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b06bd768c434a96678643fc831fcf48d740ade9739e19b213702cfbc931c4386",
+ "chksum_sha256": "4c78d6ebbda27b9916179608ef93637e90ac80f57255a331555ea26f45054be7",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1eea1a88f4575d29e487e2b6577717d01f1ea001646ee7765a50fbfafaadf461",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2db716888bcb097e33649add10d56ed82e28d58d2baf44d334c788c898c5d6e8",
+ "chksum_sha256": "73c7929852f5de88e0ec02e21db40db4f332ed6d1a28d379f983ee74ee1e6702",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b4ba5c6add39a3f87a2159f877d7a18ddf0749ab74cc4513efdbe4feaa594ae6",
+ "name": "tests/integration/targets/adaptive_response_notable_event",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/vars",
+ "name": "tests/integration/targets/adaptive_response_notable_event/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml",
+ "name": "tests/integration/targets/adaptive_response_notable_event/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f70db8cb1ea0840ea3693b3c43f76121cd90e903becdeaf54d7c2d9b272c0842",
+ "chksum_sha256": "e681bae3759ce71146a56bd2e882eaafe71789eecf0c57bb9acb8b037857a639",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/adaptive_response_notable_event/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1",
"format": 1
},
{
@@ -827,59 +883,59 @@
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/defaults",
+ "name": "tests/integration/targets/splunk_data_inputs_network/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c8e82c716422654d049fd043bbf84d624ed532f96741e032f52f14c19e970d3e",
+ "chksum_sha256": "75d83a8aa1677129e967a13404e097759d4685abd50f922d149cb45ae112b00f",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2698dfe1378767d9367e9e195fe41eb8023f50e08e51d9eba97df23f2d99e704",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/meta/main.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec4fa30fc4a7b9e002d1c7b3932286ace72ba36e4f532e2cc79f49d07e0794c3",
+ "chksum_sha256": "35ab149685e4e0c2057584a084ccd381f93c108021fe9bbb8013ea2619b5acba",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/tasks",
+ "name": "tests/integration/targets/splunk_data_inputs_network/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_network/meta/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "07767c5c9e3656ee8556479d504d1499cc2a7f1da14c54022acbcfdc655c8926",
+ "chksum_sha256": "ec4fa30fc4a7b9e002d1c7b3932286ace72ba36e4f532e2cc79f49d07e0794c3",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e7d7f58a1d24f52718c31cc560ba27eaf69da2df9e8b0d26516560b547d1d9da",
+ "name": "tests/integration/targets/splunk_data_inputs_network/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "56d91877ced6fa3159f0e3c2ead5bfea8def1503c933cbbbafeb755c6c0bedd7",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
@@ -890,52 +946,52 @@
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_network/tests/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4c9ce1df1a633f9348532c18146b0f28af7a79c9c339abd983db7d95c858952",
+ "chksum_sha256": "8ab841171f1275649e5f0387b3c3612f3a3120e76c4e3976062d043d3305c3d7",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_network/tests/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a0c4618e61535d79ef8ea6951e8374a91532579411dd3bbc79efd50645be8d53",
+ "chksum_sha256": "116734780772a8a3286d670d0bbc0232a3f7871050ecdaacc919071585101ab1",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/tests/deleted.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8ab841171f1275649e5f0387b3c3612f3a3120e76c4e3976062d043d3305c3d7",
+ "chksum_sha256": "dbcd1a81588235fd4991e38359bcb3cce7380b5f70f62cb13ddbe9fa70d8a240",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/tests/gathered.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "116734780772a8a3286d670d0bbc0232a3f7871050ecdaacc919071585101ab1",
+ "chksum_sha256": "5ed019e6534f43346211688cd7f924c9bffddca0df3e85e75dfc83f212b9729c",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "80968b09338373bf614e4510b608111cb31c4c51f0b58b9568b8c365895252a5",
+ "chksum_sha256": "c8aab1abb76b8d4b42b9f991f0f546c87511de0da3f1bc90b5788ec53c2e73d6",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2a6151569c6aafb3d8316e6beff968d80d4d9ba1260093fc53b3f53760d0db11",
+ "chksum_sha256": "3c4806a2e130867b96f6ef9a9dd4e6b96397b714aa69f25c54615a5befbfec0e",
"format": 1
},
{
- "name": "tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml",
+ "name": "tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "38643e8fba135049b1cd332a1bec01f175b5c570aadb713cb844268433450f34",
+ "chksum_sha256": "30eced738847a0eaeb7c8f355943ebaa986ff5278c6b799e0f2198dec5eeb7bf",
"format": 1
},
{
@@ -949,343 +1005,427 @@
"name": "tests/integration/targets/splunk_data_inputs_network/vars/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "af5764a4d038986e76309b592c51baa99b80ee8d643778899693913705a3efa8",
+ "chksum_sha256": "2a7c94a43042b30003afc16c24926e5cd8b55e3728213fb9b142fe186fa56a1d",
"format": 1
},
{
- "name": "tests/integration/network-integration.cfg",
+ "name": "tests/integration/targets/correlation_search_info",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/correlation_search_info/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/correlation_search_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d67b11263a8f50b30bf43c7c2b4bdd8dc4f173f0b5dd22761311360dfbd56a1d",
+ "chksum_sha256": "eb3f2ed0f2b95f4015ad83baa46691c279665f6d65c3ec9f659d8acb712fd7fe",
"format": 1
},
{
- "name": "tests/integration/target-prefixes.network",
+ "name": "tests/integration/targets/correlation_search_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d31a0d2ea7becadb883d33f8189e1cef71c07a907bef52c2437de1348005d004",
+ "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1",
"format": 1
},
{
- "name": "tests/sanity",
+ "name": "tests/integration/targets/data_input_network",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/sanity/ignore-2.10.txt",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "783614c021deecb018573244dc973a566def9cfd8265e17ab934a1ab16b6ff0a",
+ "name": "tests/integration/targets/data_input_network/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/sanity/ignore-2.11.txt",
+ "name": "tests/integration/targets/data_input_network/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "783614c021deecb018573244dc973a566def9cfd8265e17ab934a1ab16b6ff0a",
+ "chksum_sha256": "ae146cdddedf56fb025febe1d58acecc0d0ea6f17b23487747209884d822dad8",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.9.txt",
+ "name": "tests/integration/targets/data_input_network/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08d97dac9d8352b03ec85ec50e608cca29017b7286176a7a73b00f420e6475df",
+ "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1",
"format": 1
},
{
- "name": "tests/unit",
+ "name": "tests/integration/targets/splunk_correlation_searches",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/compat",
+ "name": "tests/integration/targets/splunk_correlation_searches/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/compat/__init__.py",
+ "name": "tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "fd14f57ea1fcbb5e29cb47875084bcf9c5f1910a272ea2950cff6d6a1ea76291",
"format": 1
},
{
- "name": "tests/unit/compat/builtins.py",
+ "name": "tests/integration/targets/splunk_correlation_searches/tests/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ba13a350ade8ef804336f888d5883b8e54f8bddfb9d0fadc10277a8ca6540f4e",
+ "chksum_sha256": "a60dcc8eaab017ddcc9e55ef06804ea804499160cee75ca7e6dbe25c194fc48f",
"format": 1
},
{
- "name": "tests/unit/compat/mock.py",
+ "name": "tests/integration/targets/splunk_correlation_searches/tests/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "52ecd54195edca933104eb3e937547c7395ff604ada2694a8b184c2c1466dbf1",
+ "chksum_sha256": "6121b05fcafc9f47ead22cc8b5e212a1a821198fefdce786bbde842194d0ebea",
"format": 1
},
{
- "name": "tests/unit/compat/unittest.py",
+ "name": "tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3ed698b1faec43d87a2c1ebcb15a2aae48b09ff355bb9a598e5f5a1c928dbb30",
+ "chksum_sha256": "d52d1d04b5f1d0fc291ec25ec569f897ca874592ed48f15e0c072a48f09213ed",
"format": 1
},
{
- "name": "tests/unit/mock",
+ "name": "tests/integration/targets/splunk_correlation_searches/tests/merged.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c1726bebcf55dfe12bacbe9e09f868660cbb95349985308ca86484ef55dfb33f",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "75cbab8313ae82308360089f8777c1b3cd20e8657e1ae8f12a4bf233570472f1",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6a0579e74bff632794071c601cb50f1f65c51b3f87e7a07aa129eabb6f59ec31",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_event",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/mock/__init__.py",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_event/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_event/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/unit/mock/loader.py",
+ "name": "tests/integration/targets/splunk_adaptive_response_notable_event/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "99243cafb4042ee1354d81e3f21647b18bba2b81e1bcd0d77d5487d6069740b9",
+ "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1",
"format": 1
},
{
- "name": "tests/unit/mock/path.py",
+ "name": "tests/integration/target-prefixes.network",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "77760b066115f34f1ecce2387f8194ba254f3dc44ed89f439f3e6adfd258cdf1",
+ "chksum_sha256": "d31a0d2ea7becadb883d33f8189e1cef71c07a907bef52c2437de1348005d004",
"format": 1
},
{
- "name": "tests/unit/mock/procenv.py",
+ "name": "tests/integration/network-integration.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d7f9e134ebd607e1b2910d62cd8997535c8a2cced4473a2bf5cdaae2233e3049",
+ "chksum_sha256": "d67b11263a8f50b30bf43c7c2b4bdd8dc4f173f0b5dd22761311360dfbd56a1d",
"format": 1
},
{
- "name": "tests/unit/mock/vault_helper.py",
+ "name": "docs",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "docs/splunk.es.splunk_correlation_searches_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "feae23166b6eb502f7d9b77c314970516c9a99aaad7de01295b4dfdad53c5c09",
+ "chksum_sha256": "f750f5d1889ad4bac8332e4d7a7f22c8cf11609b9a5384a3b2dca9bfecbde20d",
"format": 1
},
{
- "name": "tests/unit/mock/yaml_helper.py",
+ "name": "docs/splunk.es.data_input_monitor_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "94e2f3c867d2582c9f7a0e99e544718e355025c4a51c9925e70158fa89b3609e",
+ "chksum_sha256": "c03916b59ffdc3418b17cc5cd352cdc417d73cc18430267de747f8e788c77d8f",
"format": 1
},
{
- "name": "tests/unit/modules",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/splunk.es.splunk_data_inputs_network_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1e06125d575b50b9763e20959db14e80ca36be253b8747ecff20b58f0c2467e0",
"format": 1
},
{
- "name": "tests/unit/modules/__init__.py",
+ "name": "docs/splunk.es.adaptive_response_notable_event_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "822e165960d85e54ad2e99dd81f1d5a64337dc3c283c4d110de40ade203e2064",
"format": 1
},
{
- "name": "tests/unit/modules/conftest.py",
+ "name": "docs/splunk.es.correlation_search_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2af4846e50d461a131ad3edfb609fbb39a9eb1796048c62e4ead8234bcf5c6a1",
+ "chksum_sha256": "cc5f185336595c92d966668a1bf632162befa22b7b2875d180d4226d4e45d48d",
"format": 1
},
{
- "name": "tests/unit/modules/utils.py",
+ "name": "docs/splunk.es.correlation_search_info_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ecb4e4c4a3a490b49d33d043d246bea11580cfe5460e70630a793c2ffd0ff450",
+ "chksum_sha256": "e374b7c71d8a2b47033ef37218c3f1e7669239f4ab03ae1cd24d8c39adfcee3c",
"format": 1
},
{
- "name": "tests/unit/plugins",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/splunk.es.data_input_network_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bcf5cf564eaf42f56e7471156a2ee488d65923275f3065dd4394153d259e88cb",
"format": 1
},
{
- "name": "tests/unit/plugins/action",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/splunk.es.splunk_httpapi.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b7b00b66d8d113d97580211a4984c28a84031a259ef8649a2fc13d24f7be2adc",
"format": 1
},
{
- "name": "tests/unit/plugins/action/__init__.py",
+ "name": "docs/splunk.es.splunk_adaptive_response_notable_events_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "062e11ed229204985efaad7b9e4c71f1cd9b614bbfde3163eb0354b111ba1981",
"format": 1
},
{
- "name": "tests/unit/plugins/action/test_es_adaptive_response_notable_events.py",
+ "name": "docs/splunk.es.splunk_data_inputs_monitor_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2ce521e25bf0bb6ebfa6ff498bbcb272a9ab62c2ddd8a79c4ca84e977a93f5c0",
+ "chksum_sha256": "c427af6af42efd027cc2d8ecf4b8d09a9d756860122f8d745cc7757b797fe14d",
"format": 1
},
{
- "name": "tests/unit/plugins/action/test_es_correlation_searches.py",
+ "name": ".isort.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "711a328eafd5199109f2d7d90f8a7336df70515444e6c552282809895a7777b9",
+ "chksum_sha256": "638db45283707172a419b73f214f5b3a0112f7e6ff264f4e17627591908d9c53",
"format": 1
},
{
- "name": "tests/unit/plugins/action/test_es_data_inputs_monitors.py",
+ "name": "pyproject.toml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adeada7519c366c59c0fa3af0e1fdbbe1544ba780f9d43d29235364205b6376e",
+ "chksum_sha256": "1cb6a45dfa2625123890b93ad7fdc156b063c16e8ae6dba11511a1d1986b0fcc",
"format": 1
},
{
- "name": "tests/unit/plugins/action/test_es_data_inputs_network.py",
+ "name": ".darglint",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "64822f8ae4580b5e1fd4e8f1dccc201b2400c1bb3241908126f66812197a2a4b",
+ "chksum_sha256": "954a7045c6fa17fddfe80995f7f8251efb6df1a2b05eaf479afca6bbc6dfd4f2",
"format": 1
},
{
- "name": "tests/unit/plugins/modules",
+ "name": "bindep.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "80645079eb025b3a905b4775ac545d080a3d7d35d537c31e04f7197c94315ab5",
+ "format": 1
+ },
+ {
+ "name": "changelogs",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/plugins/modules/__init__.py",
+ "name": "changelogs/fragments",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "changelogs/fragments/.keep",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/conftest.py",
+ "name": "changelogs/config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2af4846e50d461a131ad3edfb609fbb39a9eb1796048c62e4ead8234bcf5c6a1",
+ "chksum_sha256": "c8b667733d0f4f0d6e4bbbaa3dcaa82a9cff3b6680163dc81352e08e28979fbd",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/utils.py",
+ "name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ecb4e4c4a3a490b49d33d043d246bea11580cfe5460e70630a793c2ffd0ff450",
+ "chksum_sha256": "a86bd976282bb2064053fdfdadbe10a07327897b5b1fc15aed0338da1f947868",
"format": 1
},
{
- "name": "tests/unit/__init__.py",
+ "name": "tox.ini",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "beb3313789623e5570d0871f6115ba563a0d92ea75e8e89cbd9f79045b4fe279",
"format": 1
},
{
- "name": "tests/unit/requirements.txt",
+ "name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49ba996dc4735c3463e9af561344346dfae14bcc1a68096ce78364b377f0df1f",
+ "chksum_sha256": "c3af46270e73ef5c8a1af78a38e6a1865ad9259d390c7d9ebaa226991b8c1293",
"format": 1
},
{
- "name": "tests/.keep",
+ "name": ".gitignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "bdf0b6d5eef82d2ed14ee0067b052dee3114a1ab0ad261235ef7ce2e2b1f6f9d",
"format": 1
},
{
- "name": ".gitignore",
+ "name": ".pre-commit-config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0fabb75e1cd5126877f40c30627431348b568ccd5d2df55fcbcfff03fc9d378d",
+ "chksum_sha256": "08940a10222925e0f47d36e2457f1b3e7045ed20b4e72989208b83cda88a2c25",
"format": 1
},
{
- "name": ".pre-commit-config.yaml",
+ "name": ".prettierignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0ffea7c1ca77ac9b2775cb35aea17410b1d8dc0de785de7830e08c870a4a95fd",
+ "chksum_sha256": "9881cacd6494858bc3c50f32917a7971c275f5dbeaa27d438985eacb344f9857",
"format": 1
},
{
- "name": ".yamllint",
+ "name": ".github",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": ".github/dependabot.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "827ef9e031ecdcaf137be239d33ef93fcbbc3611cbb6b30b0e507d0e03373d0e",
+ "chksum_sha256": "11ceee3c57116e9fd08bc423414b1095ff002aa012d6fb325b1a7e24d7e28461",
"format": 1
},
{
- "name": "CHANGELOG.rst",
+ "name": ".github/CODEOWNERS",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44b1f26af8e504a426e7f976c7dea43ffb5e1d51329aeb8238b8303a63503128",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "LICENSE",
+ "name": ".github/workflows",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": ".github/workflows/codecoverage.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986",
+ "chksum_sha256": "8bbcae622f5e51798b577df290135e846244399c2a6ccbfedac523b38a8330a3",
"format": 1
},
{
- "name": "README.md",
+ "name": ".github/workflows/lint.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8097af964343852b59aedd108bc36f05d7a849da0a618f3d5fd9aa49108df653",
+ "chksum_sha256": "4aeac57f14d48d82a859c822f84a0573419958bc70708f19caa6daac5c771ff9",
"format": 1
},
{
- "name": "bindep.txt",
+ "name": ".github/workflows/ack.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f20e24c3b24e3738a72623924e20848bb3bab9ea951099b7d2fcce091b9673a8",
+ "chksum_sha256": "24c6fbafaa69e3e3ee696f2cefa5120794b62cef7e870553dddce8b8af0a127c",
"format": 1
},
{
- "name": "pyproject.toml",
+ "name": ".github/workflows/release.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d3128f98117f549f979538e147feb1e53fc5bac8b98e22e1a7504767b692f533",
+ "chksum_sha256": "c9b7dba505905600bbb9f0d8e9956b7cc20f80edfa742dfdae9395542f417aa1",
"format": 1
},
{
- "name": "requirements.txt",
+ "name": ".github/workflows/push.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "9abb363291d14b5184a0d01f9c471a3e64a117a028fd1f6f1af9963da6ed4427",
"format": 1
},
{
- "name": "test-requirements.txt",
+ "name": ".github/workflows/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eaa5e13a5ebe3344585b2e5ac61a6974a6d5b132f13a815d3a0f68c36ecfe8ad",
+ "chksum_sha256": "8fb1afcaa5cfa37b36b81fd9ddf20565f0d77700b0fa80659af199456e743850",
"format": 1
},
{
- "name": "tox.ini",
+ "name": ".github/release-drafter.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7cbc785c8661033cd9dd7a60897484fce731cbe3dc124a689fc3e934b1d974fb",
+ "format": 1
+ },
+ {
+ "name": "cspell.config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fa849abc071b00742c00b96e3df480355a2af2c60e4c920f085a9ac5616d8d4b",
+ "chksum_sha256": "e1079e6d02ed34033bf83913e7b66ff7bd042e6d8ed4c113aea31123f557deb5",
"format": 1
}
],
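The FILES.json entries above pair every shipped path with a SHA-256 digest (directories carry `null` checksums), and MANIFEST.json below records the digest of FILES.json itself. A minimal verification sketch against an unpacked collection tree; the root path is illustrative and `verify_entry` is a hypothetical helper, not part of the collection:

```python
import hashlib
import json
import pathlib

def verify_entry(root: pathlib.Path, entry: dict) -> bool:
    """Check one FILES.json record; directory entries have no checksum."""
    path = root / entry["name"]
    if entry["ftype"] == "dir":
        return path.is_dir()
    digest = hashlib.sha256(path.read_bytes()).hexdigest()
    return digest == entry["chksum_sha256"]

# Usage sketch: iterate over the manifest shipped with the collection.
root = pathlib.Path("ansible_collections/splunk/es")
manifest = json.loads((root / "FILES.json").read_text())
bad = [e["name"] for e in manifest["files"] if not verify_entry(root, e)]
print("mismatched entries:", bad or "none")
```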
diff --git a/ansible_collections/splunk/es/MANIFEST.json b/ansible_collections/splunk/es/MANIFEST.json
index c420430bb..7d63d7cf1 100644
--- a/ansible_collections/splunk/es/MANIFEST.json
+++ b/ansible_collections/splunk/es/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "splunk",
"name": "es",
- "version": "2.1.0",
+ "version": "2.1.2",
"authors": [
"Ansible Seurity Team (https://github.com/ansible-security)"
],
@@ -28,7 +28,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a812354f9fca21bf13425317ff0e8df3471be2b5c3510e889388fcbaefc924c4",
+ "chksum_sha256": "bdb2212e72f1a462883425c92aa87b55ac3c3377d56f883aea5bce1c341fb477",
"format": 1
},
"format": 1
diff --git a/ansible_collections/splunk/es/README.md b/ansible_collections/splunk/es/README.md
index cc7c0037a..c329e2dc6 100644
--- a/ansible_collections/splunk/es/README.md
+++ b/ansible_collections/splunk/es/README.md
@@ -1,6 +1,8 @@
# Splunk Enterprise Security Ansible Collection
-[![CI](https://zuul-ci.org/gated.svg)](https://dashboard.zuul.ansible.com/t/ansible/project/github.com/ansible-collections/splunk.es) <!--[![Codecov](https://img.shields.io/codecov/c/github/ansible-collections/splunk.es)](https://codecov.io/gh/ansible-collections/splunk.es)-->
+[![CI](https://zuul-ci.org/gated.svg)](https://dashboard.zuul.ansible.com/t/ansible/project/github.com/ansible-collections/splunk.es)
+[![Codecov](https://codecov.io/gh/ansible-collections/splunk.es/branch/main/graph/badge.svg)](https://codecov.io/gh/ansible-collections/splunk.es)
+[![CI](https://github.com/ansible-collections/splunk.es/actions/workflows/tests.yml/badge.svg?branch=main&event=schedule)](https://github.com/ansible-collections/splunk.es/actions/workflows/tests.yml)
This is the [Ansible
Collection](https://docs.ansible.com/ansible/latest/user_guide/collections_using.html)
@@ -18,6 +20,8 @@ and provide feedback about.
This collection has been tested against the following Ansible versions: **>=2.9.10**.
+For collections that support Ansible 2.9, please ensure you update your `network_os` to use the
+fully qualified collection name (for example, `cisco.ios.ios`).
Plugins and modules within a collection may be tested with only specific Ansible versions.
A collection may contain metadata that identifies these versions.
PEP440 is the schema used to describe the versions of Ansible.
@@ -89,7 +93,7 @@ collections:
**NOTE**: For Ansible 2.9, you may not see deprecation warnings when you run your playbooks with this collection. Use this documentation to track when a module is deprecated.
-An example for using this collection to manage a log source with [Splunk Enterprise Security SIEM](https://www.splunk.com/en_us/software/enterprise-security.html) is as follows.
+An example of using this collection to manage a log source with [Splunk Enterprise Security SIEM](https://www.splunk.com/en_us/software/enterprise-security.html) is as follows.
`inventory.ini` (Note the password should be managed by a [Vault](https://docs.ansible.com/ansible/latest/user_guide/vault.html) for a production environment.)
@@ -112,7 +116,7 @@ ansible_connection=httpapi
With [Ansible
Collections](https://docs.ansible.com/ansible/latest/dev_guide/developing_collections.html)
there are various ways to utilize them either by calling specific Content from
-the Collection, such as a module, by it's Fully Qualified Collection Name (FQCN)
+the Collection, such as a module, by its Fully Qualified Collection Name (FQCN)
as we'll show in this example or by defining a Collection Search Path as the
examples below will display.
@@ -125,13 +129,13 @@ shorthand options listed below exist for convenience.
---
- name: demo splunk
hosts: splunk
- gather_facts: False
+ gather_facts: false
tasks:
- name: test splunk_data_input_monitor
splunk.es.data_input_monitor:
name: "/var/log/demo.log"
state: "present"
- recursive: True
+ recursive: true
- name: test splunk_data_input_network
splunk.es.data_input_network:
name: "9001"
@@ -167,7 +171,7 @@ splunk modules without specifying the need for the FQCN.
---
- name: demo splunk
hosts: splunk
- gather_facts: False
+ gather_facts: false
collections:
- splunk.es
tasks:
@@ -175,7 +179,7 @@ splunk modules without specifying the need for the FQCN.
data_input_monitor:
name: "/var/log/demo.log"
state: "present"
- recursive: True
+ recursive: true
- name: test splunk_data_input_network
data_input_network:
name: "9001"
@@ -212,14 +216,14 @@ FQCN.
---
- name: demo splunk
hosts: splunk
- gather_facts: False
+ gather_facts: false
tasks:
- name: collection namespace block
- name: test splunk_data_input_monitor
data_input_monitor:
name: "/var/log/demo.log"
state: "present"
- recursive: True
+ recursive: true
- name: test splunk_data_input_network
data_input_network:
name: "9001"
@@ -287,4 +291,3 @@ See [LICENSE](https://www.gnu.org/licenses/gpl-3.0.txt) to see the full text.
## Author Information
[Ansible Security Automation Team](https://github.com/ansible-security)
-
diff --git a/ansible_collections/splunk/es/bindep.txt b/ansible_collections/splunk/es/bindep.txt
index 1eeaeb4de..ba9c980fb 100644
--- a/ansible_collections/splunk/es/bindep.txt
+++ b/ansible_collections/splunk/es/bindep.txt
@@ -4,5 +4,3 @@
gcc-c++ [doc test platform:rpm]
python3-devel [test platform:rpm]
python3 [test platform:rpm]
-libssh-devel [test platform:rpm]
-libssh-dev [test platform:dpkg] \ No newline at end of file
diff --git a/ansible_collections/splunk/es/changelogs/changelog.yaml b/ansible_collections/splunk/es/changelogs/changelog.yaml
index 4ac8caeb8..dab3d4f80 100644
--- a/ansible_collections/splunk/es/changelogs/changelog.yaml
+++ b/ansible_collections/splunk/es/changelogs/changelog.yaml
@@ -2,68 +2,77 @@ ancestor: null
releases:
1.0.0:
modules:
- - description: Manage Splunk Enterprise Security Notable Event Adaptive Responses
- name: splunk.es.adaptive_response_notable_event
- namespace: ''
- - description: Manage Splunk Enterprise Security Correlation Searches
- name: splunk.es.correlation_search
- namespace: ''
- - description: Manage Splunk Enterprise Security Correlation Searches
- name: splunk.es.correlation_search_info
- namespace: ''
- - description: Manage Splunk Data Inputs of type Monitor
- name: splunk.es.data_input_monitor
- namespace: ''
- - description: Manage Splunk Data Inputs of type TCP or UDP
- name: splunk.es.data_input_network
- namespace: ''
- release_date: '2020-06-22'
+ - description: Manage Splunk Enterprise Security Notable Event Adaptive Responses
+ name: splunk.es.adaptive_response_notable_event
+ namespace: ""
+ - description: Manage Splunk Enterprise Security Correlation Searches
+ name: splunk.es.correlation_search
+ namespace: ""
+ - description: Manage Splunk Enterprise Security Correlation Searches
+ name: splunk.es.correlation_search_info
+ namespace: ""
+ - description: Manage Splunk Data Inputs of type Monitor
+ name: splunk.es.data_input_monitor
+ namespace: ""
+ - description: Manage Splunk Data Inputs of type TCP or UDP
+ name: splunk.es.data_input_network
+ namespace: ""
+ release_date: "2020-06-22"
1.0.1:
changes:
- release_summary:
- - Releasing 1.0.1 with updated changelog.
- release_date: '2020-08-28'
+ release_summary: Releasing 1.0.1 with updated changelog.
+ release_date: "2020-08-28"
1.0.2:
changes:
- release_summary:
- - Re-releasing 1.0.1 with updated galaxy file.
- release_date: '2020-09-1'
+ release_summary: Re-releasing 1.0.1 with updated galaxy file.
2.0.0:
changes:
bugfixes:
- - Fix ansible test sanity failures and fix flake8 issues.
+ - Fix ansible test sanity failures and fix flake8 issues.
major_changes:
- - Minimum required ansible.netcommon version is 2.5.1.
- - Updated base plugin references to ansible.netcommon.
+ - Minimum required ansible.netcommon version is 2.5.1.
+ - Updated base plugin references to ansible.netcommon.
fragments:
- - fix_sanity_issues.yaml
- - netcommon_ref_update.yaml
- - update_tests_unit_black_requirements.yaml
- release_date: '2022-04-29'
+ - fix_sanity_issues.yaml
+ - netcommon_ref_update.yaml
+ - update_tests_unit_black_requirements.yaml
+ release_date: "2022-04-29"
2.1.0:
changes:
minor_changes:
- - Added adaptive_response_notable_events resource module
- - Added data_inputs_monitors resource module
- - Added data_inputs_networks resource module
- - Added correlation_searches resource module
+ - Added adaptive_response_notable_events resource module
+ - Added correlation_searches resource module
+ - Added data_inputs_monitors resource module
+ - Added data_inputs_networks resource module
fragments:
- - data_inputs_monitors.yaml
- - data_inputs_networks.yaml
- - fix_adaptive_response_ne_description.yaml
- - fix_doc_for_sanity_failures.yaml
- - splunk_adaptive_response_notable_events.yaml
+ - data_inputs_monitors.yaml
+ - data_inputs_networks.yaml
+ - fix_adaptive_response_ne_description.yaml
+ - fix_doc_for_sanity_failures.yaml
+ - splunk_adaptive_response_notable_events.yaml
modules:
- - description: Manage Adaptive Responses notable events resource module
- name: splunk_adaptive_response_notable_events
- namespace: ansible_collections.splunk.es.plugins.modules
- - description: Splunk Enterprise Security Correlation searches resource module
- name: splunk_correlation_searches
- namespace: ansible_collections.splunk.es.plugins.modules
- - description: Splunk Data Inputs of type Monitor resource module
- name: splunk_data_inputs_monitor
- namespace: ansible_collections.splunk.es.plugins.modules
- - description: Manage Splunk Data Inputs of type TCP or UDP resource module
- name: splunk_data_inputs_network
- namespace: ansible_collections.splunk.es.plugins.modules
- release_date: '2022-09-07'
+ - description: Manage Adaptive Responses notable events resource module
+ name: splunk_adaptive_response_notable_events
+ namespace: ansible_collections.splunk.es.plugins.modules
+ - description: Splunk Enterprise Security Correlation searches resource module
+ name: splunk_correlation_searches
+ namespace: ansible_collections.splunk.es.plugins.modules
+ - description: Splunk Data Inputs of type Monitor resource module
+ name: splunk_data_inputs_monitor
+ namespace: ansible_collections.splunk.es.plugins.modules
+ - description: Manage Splunk Data Inputs of type TCP or UDP resource module
+ name: splunk_data_inputs_network
+ namespace: ansible_collections.splunk.es.plugins.modules
+ release_date: "2022-09-07"
+ 2.1.1:
+ changes:
+ release_summary: Releasing version 2.1.1, featuring various maintenance updates.
+ release_date: "2023-11-17"
+ 2.1.2:
+ changes:
+ bugfixes:
+ - Fixed argspec validation for plugins with empty task attributes when run with
+ Ansible 2.9.
+ fragments:
+ - module_plugins_bug_fix.yml
+ release_date: "2023-11-29"
diff --git a/ansible_collections/splunk/es/changelogs/config.yaml b/ansible_collections/splunk/es/changelogs/config.yaml
index 3988ea9bc..f403e83c7 100644
--- a/ansible_collections/splunk/es/changelogs/config.yaml
+++ b/ansible_collections/splunk/es/changelogs/config.yaml
@@ -1,4 +1,4 @@
-changelog_filename_template: CHANGELOG.rst
+changelog_filename_template: ../CHANGELOG.rst
changelog_filename_version_depth: 0
changes_file: changelog.yaml
changes_format: combined
@@ -10,21 +10,21 @@ prelude_section_name: release_summary
prelude_section_title: Release Summary
flatmap: true
sections:
-- - major_changes
- - Major Changes
-- - minor_changes
- - Minor Changes
-- - breaking_changes
- - Breaking Changes / Porting Guide
-- - deprecated_features
- - Deprecated Features
-- - removed_features
- - Removed Features (previously deprecated)
-- - security_fixes
- - Security Fixes
-- - bugfixes
- - Bugfixes
-- - known_issues
- - Known Issues
+ - - major_changes
+ - Major Changes
+ - - minor_changes
+ - Minor Changes
+ - - breaking_changes
+ - Breaking Changes / Porting Guide
+ - - deprecated_features
+ - Deprecated Features
+ - - removed_features
+ - Removed Features (previously deprecated)
+ - - security_fixes
+ - Security Fixes
+ - - bugfixes
+ - Bugfixes
+ - - known_issues
+ - Known Issues
title: Splunk Enterprise Security Collection
trivial_section_name: trivial
diff --git a/ansible_collections/splunk/es/codecov.yml b/ansible_collections/splunk/es/codecov.yml
new file mode 100644
index 000000000..c77c91a90
--- /dev/null
+++ b/ansible_collections/splunk/es/codecov.yml
@@ -0,0 +1,15 @@
+---
+codecov:
+ require_ci_to_pass: true
+comment:
+ layout: " diff, flags, files"
+ behavior: default
+ require_changes: false
+ require_base: false
+ require_head: true
+coverage:
+ status:
+ patch: false
+ project:
+ default:
+ threshold: 0.3%
diff --git a/ansible_collections/splunk/es/cspell.config.yaml b/ansible_collections/splunk/es/cspell.config.yaml
new file mode 100644
index 000000000..20e7d8842
--- /dev/null
+++ b/ansible_collections/splunk/es/cspell.config.yaml
@@ -0,0 +1,37 @@
+---
+dictionaryDefinitions:
+ - name: words
+ path: .config/dictionary.txt
+ addWords: true
+dictionaries:
+ - bash
+ - networking-terms
+ - python
+ - words
+ - "!aws"
+ - "!backwards-compatibility"
+ - "!cryptocurrencies"
+ - "!cpp"
+ignorePaths:
+ # All dot files in the root
+ - \.*
+ # This file
+ - cspell.config.yaml
+ # Generated rst files in the docs directory
+ - docs/*.rst
+ # The mypy configuration file
+ - mypy.ini
+ # The shared file for tool configuration
+ - pyproject.toml
+ # requirements.txt
+ - requirements.txt
+ # test-requirements.txt
+ - test-requirements.txt
+ # the bindep file
+ - bindep.txt
+ # The tox configuration file
+ - tox.ini
+
+languageSettings:
+ - languageId: python
+ allowCompoundWords: false
diff --git a/ansible_collections/splunk/es/docs/splunk.es.adaptive_response_notable_event_module.rst b/ansible_collections/splunk/es/docs/splunk.es.adaptive_response_notable_event_module.rst
index 4f2462652..c21dab991 100644
--- a/ansible_collections/splunk/es/docs/splunk.es.adaptive_response_notable_event_module.rst
+++ b/ansible_collections/splunk/es/docs/splunk.es.adaptive_response_notable_event_module.rst
@@ -260,6 +260,7 @@ Parameters
</div>
</td>
<td>
+ <b>Default:</b><br/><div style="color: blue">[]</div>
</td>
<td>
<div>List of adaptive responses that should be run next</div>
@@ -277,6 +278,7 @@ Parameters
</div>
</td>
<td>
+ <b>Default:</b><br/><div style="color: blue">[]</div>
</td>
<td>
<div>List of adaptive responses that are recommended to be run next</div>
diff --git a/ansible_collections/splunk/es/docs/splunk.es.data_input_monitor_module.rst b/ansible_collections/splunk/es/docs/splunk.es.data_input_monitor_module.rst
index e4b7beb00..0ab4be9be 100644
--- a/ansible_collections/splunk/es/docs/splunk.es.data_input_monitor_module.rst
+++ b/ansible_collections/splunk/es/docs/splunk.es.data_input_monitor_module.rst
@@ -71,7 +71,7 @@ Parameters
</ul>
</td>
<td>
- <div>If set to <code>True</code>, the index value is checked to ensure that it is the name of a valid index.</div>
+ <div>If set to <code>true</code>, the index value is checked to ensure that it is the name of a valid index.</div>
</td>
</tr>
<tr>
@@ -90,7 +90,7 @@ Parameters
</ul>
</td>
<td>
- <div>If set to <code>True</code>, the name value is checked to ensure that it exists.</div>
+ <div>If set to <code>true</code>, the name value is checked to ensure that it exists.</div>
</td>
</tr>
<tr>
@@ -143,7 +143,7 @@ Parameters
</ul>
</td>
<td>
- <div>If set to <code>True</code>, files that are seen for the first time is read from the end.</div>
+                    <div>If set to <code>true</code>, files that are seen for the first time are read from the end.</div>
</td>
</tr>
<tr>
@@ -253,7 +253,7 @@ Parameters
</ul>
</td>
<td>
- <div>Setting this to False prevents monitoring of any subdirectories encountered within this data input.</div>
+ <div>Setting this to false prevents monitoring of any subdirectories encountered within this data input.</div>
</td>
</tr>
<tr>
@@ -351,7 +351,7 @@ Examples
splunk.es.data_input_monitor:
name: "/var/log/example.log"
state: "present"
- recursive: True
+ recursive: true
diff --git a/ansible_collections/splunk/es/docs/splunk.es.data_input_network_module.rst b/ansible_collections/splunk/es/docs/splunk.es.data_input_network_module.rst
index fb48a05d7..3686ab253 100644
--- a/ansible_collections/splunk/es/docs/splunk.es.data_input_network_module.rst
+++ b/ansible_collections/splunk/es/docs/splunk.es.data_input_network_module.rst
@@ -232,7 +232,7 @@ Parameters
<td>
<div>Set the source type for events from this input.</div>
<div>&quot;sourcetype=&quot; is automatically prepended to &lt;string&gt;.</div>
- <div>Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).</div>
+                    <div>Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).</div>
</td>
</tr>
<tr>
diff --git a/ansible_collections/splunk/es/docs/splunk.es.splunk_adaptive_response_notable_events_module.rst b/ansible_collections/splunk/es/docs/splunk.es.splunk_adaptive_response_notable_events_module.rst
index 4838de449..fdd3a9946 100644
--- a/ansible_collections/splunk/es/docs/splunk.es.splunk_adaptive_response_notable_events_module.rst
+++ b/ansible_collections/splunk/es/docs/splunk.es.splunk_adaptive_response_notable_events_module.rst
@@ -532,19 +532,19 @@ Examples
drilldown_earliest_offset: $info_min_time$
drilldown_latest_offset: $info_max_time$
extract_artifacts:
- asset:
- - src
- - dest
- identity:
- - src_user
- - user
- - src_user_id
+ asset:
+ - src
+ - dest
+ identity:
+ - src_user
+ - user
+ - src_user_id
next_steps:
- - makestreams
+ - makestreams
name: ansible_test_notable
recommended_actions:
- - email
- - logevent
+ - email
+ - logevent
security_domain: threat
severity: high
state: merged
@@ -611,19 +611,19 @@ Examples
drilldown_earliest_offset: $info_min_time$
drilldown_latest_offset: $info_max_time$
extract_artifacts:
- asset:
- - src
- - dest
- identity:
- - src_user
- - user
- - src_user_id
+ asset:
+ - src
+ - dest
+ identity:
+ - src_user
+ - user
+ - src_user_id
next_steps:
- - makestreams
+ - makestreams
name: ansible_test_notable
recommended_actions:
- - email
- - logevent
+ - email
+ - logevent
security_domain: threat
severity: high
state: replaced
diff --git a/ansible_collections/splunk/es/docs/splunk.es.splunk_correlation_searches_module.rst b/ansible_collections/splunk/es/docs/splunk.es.splunk_correlation_searches_module.rst
index 76295b5dd..78b87b0f4 100644
--- a/ansible_collections/splunk/es/docs/splunk.es.splunk_correlation_searches_module.rst
+++ b/ansible_collections/splunk/es/docs/splunk.es.splunk_correlation_searches_module.rst
@@ -694,7 +694,7 @@ Examples
throttle_window_duration: 5s
throttle_fields_to_group_by:
- test_field1
- suppress_alerts: False
+ suppress_alerts: false
search: >
'| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent'
'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai'
@@ -801,7 +801,7 @@ Examples
throttle_fields_to_group_by:
- test_field1
- test_field2
- suppress_alerts: True
+ suppress_alerts: true
search: >
'| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent'
'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai'
diff --git a/ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_monitor_module.rst b/ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_monitor_module.rst
index 54cb445ea..7e79bcaaa 100644
--- a/ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_monitor_module.rst
+++ b/ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_monitor_module.rst
@@ -83,7 +83,7 @@ Parameters
</ul>
</td>
<td>
- <div>If set to <code>True</code>, the index value is checked to ensure that it is the name of a valid index.</div>
+ <div>If set to <code>true</code>, the index value is checked to ensure that it is the name of a valid index.</div>
<div>This parameter is not returned back by Splunk while obtaining object information. It is therefore left out while performing idempotency checks</div>
</td>
</tr>
@@ -104,7 +104,7 @@ Parameters
</ul>
</td>
<td>
- <div>If set to <code>True</code>, the name value is checked to ensure that it exists.</div>
+ <div>If set to <code>true</code>, the name value is checked to ensure that it exists.</div>
<div>This parameter is not returned back by Splunk while obtaining object information. It is therefore left out while performing idempotency checks</div>
</td>
</tr>
@@ -161,7 +161,7 @@ Parameters
</ul>
</td>
<td>
- <div>If set to <code>True</code>, files that are seen for the first time is read from the end.</div>
+                    <div>If set to <code>true</code>, files that are seen for the first time are read from the end.</div>
</td>
</tr>
<tr>
@@ -436,8 +436,8 @@ Examples
config:
- name: "/var/log"
blacklist: "//var/log/[a-z]/gm"
- check_index: True
- check_path: True
+ check_index: true
+ check_path: true
crc_salt: <SOURCE>
rename_source: "test"
whitelist: "//var/log/[0-9]/gm"
diff --git a/ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_network_module.rst b/ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_network_module.rst
index aa561b1f0..0f220ae49 100644
--- a/ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_network_module.rst
+++ b/ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_network_module.rst
@@ -405,7 +405,7 @@ Parameters
<td>
<div>Set the source type for events from this input.</div>
<div>&quot;sourcetype=&quot; is automatically prepended to &lt;string&gt;.</div>
- <div>Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).</div>
+ <div>Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).</div>
</td>
</tr>
<tr>
@@ -668,7 +668,7 @@ Examples
datatype: raw
name: 8100
connection_host: ip
- disabled: True
+ disabled: true
raw_tcp_done_timeout: 9
restrict_to_host: default
queue: parsingQueue
@@ -719,7 +719,7 @@ Examples
datatype: cooked
name: 8101
connection_host: ip
- disabled: False
+ disabled: false
restrict_to_host: default
state: merged
@@ -770,7 +770,7 @@ Examples
# ],
# "before": [],
- - name: To add the Splunk SSL
+ - name: To add the Splunk SSL
splunk.es.splunk_data_inputs_network:
config:
- protocol: tcp
@@ -841,7 +841,7 @@ Examples
datatype: raw
name: 8100
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
index: default
queue: parsingQueue
diff --git a/ansible_collections/splunk/es/plugins/action/splunk_adaptive_response_notable_events.py b/ansible_collections/splunk/es/plugins/action/splunk_adaptive_response_notable_events.py
index a95e4b3ed..ee6364708 100644
--- a/ansible_collections/splunk/es/plugins/action/splunk_adaptive_response_notable_events.py
+++ b/ansible_collections/splunk/es/plugins/action/splunk_adaptive_response_notable_events.py
@@ -23,18 +23,20 @@ The module file for adaptive_response_notable_events
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import json
-from ansible.plugins.action import ActionBase
from ansible.errors import AnsibleActionFail
-from ansible.module_utils.six.moves.urllib.parse import quote
from ansible.module_utils.connection import Connection
-
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
+from ansible.module_utils.six.moves.urllib.parse import quote
+from ansible.plugins.action import ActionBase
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
)
+
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
@@ -42,9 +44,6 @@ from ansible_collections.splunk.es.plugins.module_utils.splunk import (
remove_get_keys_from_payload_dict,
set_defaults,
)
-from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
- AnsibleArgSpecValidator,
-)
from ansible_collections.splunk.es.plugins.modules.splunk_adaptive_response_notable_events import (
DOCUMENTATION,
)
@@ -56,9 +55,7 @@ class ActionModule(ActionBase):
def __init__(self, *args, **kwargs):
super(ActionModule, self).__init__(*args, **kwargs)
self._result = None
- self.api_object = (
- "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
- )
+ self.api_object = "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
self.module_name = "adaptive_response_notable_events"
self.key_transform = {
"action.notable.param.default_owner": "default_owner",
@@ -80,7 +77,7 @@ class ActionModule(ActionBase):
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
- data=self._task.args,
+ data=utils.remove_empties(self._task.args),
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
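The `data=utils.remove_empties(self._task.args)` change above is the fix referenced by the 2.1.2 changelog entry: Ansible 2.9 injects unset task attributes as `None`, which the DOCUMENTATION-derived schema then rejects. A simplified sketch of the behavior being relied on, not the actual `ansible.netcommon` implementation:

```python
# Simplified stand-in for ansible.netcommon's utils.remove_empties:
# drop None/empty values so task attributes injected as None by
# Ansible 2.9 do not trip argspec validation.
def remove_empties(value):
    if isinstance(value, dict):
        cleaned = {k: remove_empties(v) for k, v in value.items()}
        return {k: v for k, v in cleaned.items() if v not in (None, {}, [], "")}
    if isinstance(value, list):
        return [remove_empties(v) for v in value if v is not None]
    return value

task_args = {"state": "gathered", "config": None, "running_config": None}
assert remove_empties(task_args) == {"state": "gathered"}
```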
@@ -103,17 +100,13 @@ class ActionModule(ActionBase):
def save_params(self, want_conf):
param_store = {}
if "recommended_actions" in want_conf:
- param_store["recommended_actions"] = want_conf[
- "recommended_actions"
- ]
+ param_store["recommended_actions"] = want_conf["recommended_actions"]
if "extract_artifacts" in want_conf:
param_store["extract_artifacts"] = want_conf["extract_artifacts"]
if "next_steps" in want_conf:
param_store["next_steps"] = want_conf["next_steps"]
if "investigation_profiles" in want_conf:
- param_store["investigation_profiles"] = want_conf[
- "investigation_profiles"
- ]
+ param_store["investigation_profiles"] = want_conf["investigation_profiles"]
return param_store
@@ -125,8 +118,7 @@ class ActionModule(ActionBase):
if metadata["actions"] == "notable":
pass
elif (
- len(metadata["actions"].split(",")) > 0
- and "notable" not in metadata["actions"]
+ len(metadata["actions"].split(",")) > 0 and "notable" not in metadata["actions"]
):
metadata["actions"] = metadata["actions"] + ", notable"
else:
@@ -136,10 +128,7 @@ class ActionModule(ActionBase):
if "actions" in metadata:
if metadata["actions"] == "notable":
metadata["actions"] = ""
- elif (
- len(metadata["actions"].split(",")) > 0
- and "notable" in metadata["actions"]
- ):
+ elif len(metadata["actions"].split(",")) > 0 and "notable" in metadata["actions"]:
tmp_list = metadata["actions"].split(",")
tmp_list.remove(" notable")
metadata["actions"] = ",".join(tmp_list)
@@ -161,7 +150,7 @@ class ActionModule(ActionBase):
res.pop("investigation_profiles")
else:
res["investigation_profiles"] = json.loads(
- res["investigation_profiles"]
+ res["investigation_profiles"],
)
investigation_profiles = []
for keys in res["investigation_profiles"].keys():
@@ -209,12 +198,12 @@ class ActionModule(ActionBase):
if "action.notable.param.extract_artifacts" in res:
res["action.notable.param.extract_artifacts"] = json.dumps(
- res["action.notable.param.extract_artifacts"]
+ res["action.notable.param.extract_artifacts"],
)
if "action.notable.param.recommended_actions" in res:
res["action.notable.param.recommended_actions"] = ",".join(
- res["action.notable.param.recommended_actions"]
+ res["action.notable.param.recommended_actions"],
)
if "action.notable.param.investigation_profiles" in res:
@@ -222,7 +211,7 @@ class ActionModule(ActionBase):
for element in res["action.notable.param.investigation_profiles"]:
investigation_profiles["profile://" + element] = {}
res["action.notable.param.investigation_profiles"] = json.dumps(
- investigation_profiles
+ investigation_profiles,
)
if "action.notable.param.next_steps" in res:
@@ -233,7 +222,7 @@ class ActionModule(ActionBase):
# NOTE: version:1 appears to be hard coded when you create this via the splunk web UI
next_steps_dict = {"version": 1, "data": next_steps}
res["action.notable.param.next_steps"] = json.dumps(
- next_steps_dict
+ next_steps_dict,
)
if "action.notable.param.default_status" in res:
@@ -259,20 +248,20 @@ class ActionModule(ActionBase):
"{0}/{1}".format(
self.api_object,
quote(correlation_search_name),
- )
+ ),
)
search_result = {}
if query_dict:
search_result, metadata = self.map_params_to_object(
- query_dict["entry"][0]
+ query_dict["entry"][0],
)
else:
raise AnsibleActionFail(
"Correlation Search '{0}' doesn't exist".format(
- correlation_search_name
- )
+ correlation_search_name,
+ ),
)
return search_result, metadata
@@ -285,15 +274,14 @@ class ActionModule(ActionBase):
changed = False
for want_conf in config:
search_by_name, metadata = self.search_for_resource_name(
- conn_request, want_conf["correlation_search_name"]
+ conn_request,
+ want_conf["correlation_search_name"],
)
search_by_name = utils.remove_empties(search_by_name)
# Compare obtained values with a dict representing values in a 'deleted' state
diff_cmp = {
- "correlation_search_name": want_conf[
- "correlation_search_name"
- ],
+ "correlation_search_name": want_conf["correlation_search_name"],
"drilldown_earliest_offset": "$info_min_time$",
"drilldown_latest_offset": "$info_max_time$",
}
@@ -367,7 +355,8 @@ class ActionModule(ActionBase):
remove_from_diff_compare = []
for want_conf in config:
have_conf, metadata = self.search_for_resource_name(
- conn_request, want_conf["correlation_search_name"]
+ conn_request,
+ want_conf["correlation_search_name"],
)
correlation_search_name = want_conf["correlation_search_name"]
@@ -385,17 +374,17 @@ class ActionModule(ActionBase):
if diff:
before.append(have_conf)
if self._task.args["state"] == "merged":
-
# need to store 'recommended_actions','extract_artifacts'
# 'next_steps' and 'investigation_profiles'
# since merging in the parsed form will eliminate any differences
param_store = self.save_params(want_conf)
want_conf = utils.remove_empties(
- utils.dict_merge(have_conf, want_conf)
+ utils.dict_merge(have_conf, want_conf),
)
want_conf = remove_get_keys_from_payload_dict(
- want_conf, remove_from_diff_compare
+ want_conf,
+ remove_from_diff_compare,
)
# restoring parameters
@@ -404,7 +393,8 @@ class ActionModule(ActionBase):
changed = True
payload = self.map_objects_to_params(
- metadata, want_conf
+ metadata,
+ want_conf,
)
url = "{0}/{1}".format(
@@ -416,18 +406,20 @@ class ActionModule(ActionBase):
data=payload,
)
response_json, metadata = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.append(response_json)
elif self._task.args["state"] == "replaced":
self.delete_module_api_config(
- conn_request=conn_request, config=[want_conf]
+ conn_request=conn_request,
+ config=[want_conf],
)
changed = True
payload = self.map_objects_to_params(
- metadata, want_conf
+ metadata,
+ want_conf,
)
url = "{0}/{1}".format(
@@ -439,7 +431,7 @@ class ActionModule(ActionBase):
data=payload,
)
response_json, metadata = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.append(response_json)
@@ -461,7 +453,7 @@ class ActionModule(ActionBase):
)
response_json, metadata = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.extend(before)
@@ -503,14 +495,12 @@ class ActionModule(ActionBase):
for item in config:
self._result[self.module_name]["gathered"].append(
self.search_for_resource_name(
- conn_request, item["correlation_search_name"]
- )[0]
+ conn_request,
+ item["correlation_search_name"],
+ )[0],
)
- elif (
- self._task.args["state"] == "merged"
- or self._task.args["state"] == "replaced"
- ):
+ elif self._task.args["state"] == "merged" or self._task.args["state"] == "replaced":
(
self._result[self.module_name],
self._result["changed"],
diff --git a/ansible_collections/splunk/es/plugins/action/splunk_correlation_searches.py b/ansible_collections/splunk/es/plugins/action/splunk_correlation_searches.py
index 5f0daea16..b5ba500ae 100644
--- a/ansible_collections/splunk/es/plugins/action/splunk_correlation_searches.py
+++ b/ansible_collections/splunk/es/plugins/action/splunk_correlation_searches.py
@@ -23,17 +23,20 @@ The module file for splunk_correlation_searches
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import json
-from ansible.plugins.action import ActionBase
+
from ansible.errors import AnsibleActionFail
-from ansible.module_utils.six.moves.urllib.parse import quote
from ansible.module_utils.connection import Connection
-
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
+from ansible.module_utils.six.moves.urllib.parse import quote
+from ansible.plugins.action import ActionBase
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
)
+
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
@@ -41,12 +44,7 @@ from ansible_collections.splunk.es.plugins.module_utils.splunk import (
remove_get_keys_from_payload_dict,
set_defaults,
)
-from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
- AnsibleArgSpecValidator,
-)
-from ansible_collections.splunk.es.plugins.modules.splunk_correlation_searches import (
- DOCUMENTATION,
-)
+from ansible_collections.splunk.es.plugins.modules.splunk_correlation_searches import DOCUMENTATION
class ActionModule(ActionBase):
@@ -55,9 +53,7 @@ class ActionModule(ActionBase):
def __init__(self, *args, **kwargs):
super(ActionModule, self).__init__(*args, **kwargs)
self._result = None
- self.api_object = (
- "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
- )
+ self.api_object = "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
self.module_name = "correlation_searches"
self.key_transform = {
"disabled": "disabled",
@@ -83,7 +79,7 @@ class ActionModule(ActionBase):
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
- data=self._task.args,
+ data=utils.remove_empties(self._task.args),
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
@@ -111,9 +107,7 @@ class ActionModule(ActionBase):
if "annotations" in want_conf:
param_store["annotations"] = want_conf["annotations"]
if "throttle_fields_to_group_by" in want_conf:
- param_store["throttle_fields_to_group_by"] = want_conf[
- "throttle_fields_to_group_by"
- ]
+ param_store["throttle_fields_to_group_by"] = want_conf["throttle_fields_to_group_by"]
return param_store
@@ -137,9 +131,7 @@ class ActionModule(ActionBase):
res["trigger_alert"] = "for each result"
if "throttle_fields_to_group_by" in res:
- res["throttle_fields_to_group_by"] = res[
- "throttle_fields_to_group_by"
- ].split(",")
+ res["throttle_fields_to_group_by"] = res["throttle_fields_to_group_by"].split(",")
if "annotations" in res:
res["annotations"] = json.loads(res["annotations"])
@@ -149,7 +141,6 @@ class ActionModule(ActionBase):
# need to check for custom annotation frameworks
for k, v in res["annotations"].items():
if k in {"cis20", "nist", "mitre_attack", "kill_chain_phases"}:
-
continue
entry = {}
entry["framework"] = k
@@ -188,7 +179,7 @@ class ActionModule(ActionBase):
if "alert.suppress.fields" in res:
res["alert.suppress.fields"] = ",".join(
- res["alert.suppress.fields"]
+ res["alert.suppress.fields"],
)
if (
@@ -196,12 +187,12 @@ class ActionModule(ActionBase):
and "custom" in res["action.correlationsearch.annotations"]
):
for ele in res["action.correlationsearch.annotations"]["custom"]:
- res["action.correlationsearch.annotations"][
- ele["framework"]
- ] = ele["custom_annotations"]
+ res["action.correlationsearch.annotations"][ele["framework"]] = ele[
+ "custom_annotations"
+ ]
res["action.correlationsearch.annotations"].pop("custom")
res["action.correlationsearch.annotations"] = json.dumps(
- res["action.correlationsearch.annotations"]
+ res["action.correlationsearch.annotations"],
)
return res
@@ -211,7 +202,7 @@ class ActionModule(ActionBase):
"{0}/{1}".format(
self.api_object,
quote(correlation_search_name),
- )
+ ),
)
search_result = {}
@@ -227,7 +218,8 @@ class ActionModule(ActionBase):
changed = False
for want_conf in config:
search_by_name = self.search_for_resource_name(
- conn_request, want_conf["name"]
+ conn_request,
+ want_conf["name"],
)
if search_by_name:
@@ -259,7 +251,8 @@ class ActionModule(ActionBase):
remove_from_diff_compare = []
for want_conf in config:
have_conf = self.search_for_resource_name(
- conn_request, want_conf["name"]
+ conn_request,
+ want_conf["name"],
)
if have_conf:
@@ -282,10 +275,11 @@ class ActionModule(ActionBase):
param_store = self.save_params(want_conf)
want_conf = utils.remove_empties(
- utils.dict_merge(have_conf, want_conf)
+ utils.dict_merge(have_conf, want_conf),
)
want_conf = remove_get_keys_from_payload_dict(
- want_conf, remove_from_diff_compare
+ want_conf,
+ remove_from_diff_compare,
)
# restoring parameters
@@ -304,13 +298,14 @@ class ActionModule(ActionBase):
data=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.append(response_json)
elif self._task.args["state"] == "replaced":
self.delete_module_api_config(
- conn_request=conn_request, config=[want_conf]
+ conn_request=conn_request,
+ config=[want_conf],
)
changed = True
@@ -333,7 +328,7 @@ class ActionModule(ActionBase):
data=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.append(response_json)
@@ -354,7 +349,8 @@ class ActionModule(ActionBase):
# while creating new correlation search, this is how to set the 'app' field
if "app" in want_conf:
url = url.replace(
- "SplunkEnterpriseSecuritySuite", want_conf["app"]
+ "SplunkEnterpriseSecuritySuite",
+ want_conf["app"],
)
api_response = conn_request.create_update(
@@ -362,7 +358,7 @@ class ActionModule(ActionBase):
data=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.extend(before)
@@ -403,20 +399,19 @@ class ActionModule(ActionBase):
self._result["gathered"] = []
for item in config:
result = self.search_for_resource_name(
- conn_request, item["name"]
+ conn_request,
+ item["name"],
)
if result:
self._result["gathered"].append(result)
for item in config:
self._result["gathered"].append(
self.search_for_resource_name(
- conn_request, item["name"]
- )
+ conn_request,
+ item["name"],
+ ),
)
- elif (
- self._task.args["state"] == "merged"
- or self._task.args["state"] == "replaced"
- ):
+ elif self._task.args["state"] == "merged" or self._task.args["state"] == "replaced":
(
self._result[self.module_name],
self._result["changed"],
diff --git a/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_monitor.py b/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_monitor.py
index 7c9c03a55..e2e68841f 100644
--- a/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_monitor.py
+++ b/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_monitor.py
@@ -23,15 +23,17 @@ The module file for data_inputs_monitor
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-from ansible.plugins.action import ActionBase
-from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible.module_utils.connection import Connection
-
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
+from ansible.module_utils.six.moves.urllib.parse import quote_plus
+from ansible.plugins.action import ActionBase
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
)
+
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
@@ -39,12 +41,7 @@ from ansible_collections.splunk.es.plugins.module_utils.splunk import (
remove_get_keys_from_payload_dict,
set_defaults,
)
-from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
- AnsibleArgSpecValidator,
-)
-from ansible_collections.splunk.es.plugins.modules.splunk_data_inputs_monitor import (
- DOCUMENTATION,
-)
+from ansible_collections.splunk.es.plugins.modules.splunk_data_inputs_monitor import DOCUMENTATION
class ActionModule(ActionBase):
@@ -77,7 +74,7 @@ class ActionModule(ActionBase):
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
- data=self._task.args,
+ data=utils.remove_empties(self._task.args),
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
@@ -102,7 +99,7 @@ class ActionModule(ActionBase):
def search_for_resource_name(self, conn_request, directory_name):
query_dict = conn_request.get_by_path(
- "{0}/{1}".format(self.api_object, quote_plus(directory_name))
+ "{0}/{1}".format(self.api_object, quote_plus(directory_name)),
)
search_result = {}
@@ -118,14 +115,16 @@ class ActionModule(ActionBase):
changed = False
for want_conf in config:
search_by_name = self.search_for_resource_name(
- conn_request, want_conf["name"]
+ conn_request,
+ want_conf["name"],
)
if search_by_name:
before.append(search_by_name)
conn_request.delete_by_path(
"{0}/{1}".format(
- self.api_object, quote_plus(want_conf["name"])
- )
+ self.api_object,
+ quote_plus(want_conf["name"]),
+ ),
)
changed = True
after = []
@@ -157,7 +156,8 @@ class ActionModule(ActionBase):
]
for want_conf in config:
have_conf = self.search_for_resource_name(
- conn_request, want_conf["name"]
+ conn_request,
+ want_conf["name"],
)
if have_conf:
@@ -173,22 +173,24 @@ class ActionModule(ActionBase):
if diff:
diff = remove_get_keys_from_payload_dict(
- diff, remove_from_diff_compare
+ diff,
+ remove_from_diff_compare,
)
if diff:
before.append(have_conf)
if self._task.args["state"] == "merged":
-
want_conf = utils.remove_empties(
- utils.dict_merge(have_conf, want_conf)
+ utils.dict_merge(have_conf, want_conf),
)
want_conf = remove_get_keys_from_payload_dict(
- want_conf, remove_from_diff_compare
+ want_conf,
+ remove_from_diff_compare,
)
changed = True
payload = map_obj_to_params(
- want_conf, self.key_transform
+ want_conf,
+ self.key_transform,
)
url = "{0}/{1}".format(
self.api_object,
@@ -199,7 +201,7 @@ class ActionModule(ActionBase):
data=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.append(response_json)
@@ -208,12 +210,13 @@ class ActionModule(ActionBase):
"{0}/{1}".format(
self.api_object,
quote_plus(want_conf["name"]),
- )
+ ),
)
changed = True
payload = map_obj_to_params(
- want_conf, self.key_transform
+ want_conf,
+ self.key_transform,
)
url = "{0}".format(self.api_object)
api_response = conn_request.create_update(
@@ -221,7 +224,7 @@ class ActionModule(ActionBase):
data=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.append(response_json)
@@ -242,7 +245,7 @@ class ActionModule(ActionBase):
data=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.extend(before)
@@ -257,7 +260,6 @@ class ActionModule(ActionBase):
return res_config, changed
def run(self, tmp=None, task_vars=None):
-
self._supports_check_mode = True
self._result = super(ActionModule, self).run(tmp, task_vars)
@@ -283,18 +285,16 @@ class ActionModule(ActionBase):
self._result["changed"] = False
for item in config:
result = self.search_for_resource_name(
- conn_request, item["name"]
+ conn_request,
+ item["name"],
)
if result:
self._result["gathered"].append(result)
else:
self._result["gathered"] = conn_request.get_by_path(
- self.api_object
+ self.api_object,
)["entry"]
- elif (
- self._task.args["state"] == "merged"
- or self._task.args["state"] == "replaced"
- ):
+ elif self._task.args["state"] == "merged" or self._task.args["state"] == "replaced":
(
self._result[self.module_name],
self._result["changed"],
diff --git a/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_network.py b/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_network.py
index bd72d12b5..2558a05f4 100644
--- a/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_network.py
+++ b/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_network.py
@@ -23,28 +23,25 @@ The module file for data_inputs_network
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-from ansible.plugins.action import ActionBase
from ansible.errors import AnsibleActionFail
-from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible.module_utils.connection import Connection
-
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
+from ansible.module_utils.six.moves.urllib.parse import quote_plus
+from ansible.plugins.action import ActionBase
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
)
+
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
map_params_to_obj,
remove_get_keys_from_payload_dict,
)
-from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
- AnsibleArgSpecValidator,
-)
-from ansible_collections.splunk.es.plugins.modules.splunk_data_inputs_network import (
- DOCUMENTATION,
-)
+from ansible_collections.splunk.es.plugins.modules.splunk_data_inputs_network import DOCUMENTATION
class ActionModule(ActionBase):
@@ -79,7 +76,7 @@ class ActionModule(ActionBase):
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
- data=self._task.args,
+ data=utils.remove_empties(self._task.args),
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
@@ -172,7 +169,7 @@ class ActionModule(ActionBase):
url = url[:-1]
else:
raise AnsibleActionFail(
- "Incompatible protocol specified. Please specify 'tcp' or 'udp'"
+ "Incompatible protocol specified. Please specify 'tcp' or 'udp'",
)
if req_type == "get":
@@ -199,7 +196,8 @@ class ActionModule(ActionBase):
if query_dict:
search_result = self.map_params_to_object(
- query_dict["entry"][0], datatype
+ query_dict["entry"][0],
+ datatype,
)
# Adding back protocol and datatype fields for better clarity
@@ -240,7 +238,8 @@ class ActionModule(ActionBase):
and want_conf["restrict_to_host"] not in want_conf["name"]
):
want_conf["name"] = "{0}:{1}".format(
- want_conf["restrict_to_host"], want_conf["name"]
+ want_conf["restrict_to_host"],
+ want_conf["name"],
)
# If datatype is "splunktcptoken", the value "splunktcptoken://" is appended
@@ -251,7 +250,8 @@ class ActionModule(ActionBase):
and "splunktcptoken://" not in want_conf["name"]
):
want_conf["name"] = "{0}{1}".format(
- "splunktcptoken://", want_conf["name"]
+ "splunktcptoken://",
+ want_conf["name"],
)
name = want_conf["name"]
@@ -296,7 +296,8 @@ class ActionModule(ActionBase):
raise AnsibleActionFail("No name specified")
have_conf, protocol, datatype, name, _old_name = self.parse_config(
- conn_request, want_conf
+ conn_request,
+ want_conf,
)
if protocol == "tcp" and datatype == "ssl":
@@ -336,14 +337,11 @@ class ActionModule(ActionBase):
]
have_conf, protocol, datatype, name, old_name = self.parse_config(
- conn_request, want_conf
+ conn_request,
+ want_conf,
)
- if (
- protocol == "tcp"
- and datatype == "ssl"
- and self._task.args["state"] == "replaced"
- ):
+ if protocol == "tcp" and datatype == "ssl" and self._task.args["state"] == "replaced":
raise AnsibleActionFail("Replaced state not supported for SSL")
if have_conf:
@@ -358,22 +356,24 @@ class ActionModule(ActionBase):
if diff:
diff = remove_get_keys_from_payload_dict(
- diff, remove_from_diff_compare
+ diff,
+ remove_from_diff_compare,
)
if diff:
before.append(have_conf)
if self._task.args["state"] == "merged":
-
want_conf = utils.remove_empties(
- utils.dict_merge(have_conf, want_conf)
+ utils.dict_merge(have_conf, want_conf),
)
want_conf = remove_get_keys_from_payload_dict(
- want_conf, remove_from_diff_compare
+ want_conf,
+ remove_from_diff_compare,
)
changed = True
payload = map_obj_to_params(
- want_conf, self.key_transform
+ want_conf,
+ self.key_transform,
)
api_response = self.request_by_path(
conn_request,
@@ -384,7 +384,8 @@ class ActionModule(ActionBase):
payload=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0], datatype
+ api_response["entry"][0],
+ datatype,
)
# Adding back protocol and datatype fields for better clarity
@@ -404,7 +405,8 @@ class ActionModule(ActionBase):
changed = True
payload = map_obj_to_params(
- want_conf, self.key_transform
+ want_conf,
+ self.key_transform,
)
# while creating new conf, we need to only use numerical values
# splunk will later append param value to it.
@@ -419,7 +421,8 @@ class ActionModule(ActionBase):
payload=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0], datatype
+ api_response["entry"][0],
+ datatype,
)
# Adding back protocol and datatype fields for better clarity
@@ -449,7 +452,8 @@ class ActionModule(ActionBase):
payload=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0], datatype
+ api_response["entry"][0],
+ datatype,
)
# Adding back protocol and datatype fields for better clarity
@@ -490,7 +494,6 @@ class ActionModule(ActionBase):
self._result["changed"] = False
for item in config:
if item.get("name"):
-
result = self.search_for_resource_name(
conn_request,
item["protocol"],
@@ -514,10 +517,7 @@ class ActionModule(ActionBase):
else:
raise AnsibleActionFail("No protocol specified")
- elif (
- self._task.args["state"] == "merged"
- or self._task.args["state"] == "replaced"
- ):
+ elif self._task.args["state"] == "merged" or self._task.args["state"] == "replaced":
if config:
(
self._result[self.module_return],
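
The name-normalization hunks above implement two Splunk conventions: a restrict_to_host value is prefixed onto the port as host:port, and splunktcptoken stanzas carry a splunktcptoken:// scheme. Extracted as a standalone sketch of the same two rules:

def normalize_name(want_conf, datatype):
    # Mirror the two prefixing rules from the action plugin above.
    name = want_conf["name"]
    if want_conf.get("restrict_to_host") and want_conf["restrict_to_host"] not in name:
        name = "{0}:{1}".format(want_conf["restrict_to_host"], name)
    if datatype == "splunktcptoken" and "splunktcptoken://" not in name:
        name = "{0}{1}".format("splunktcptoken://", name)
    return name

print(normalize_name({"name": "8100", "restrict_to_host": "10.1.1.1"}, "tcp"))
# -> 10.1.1.1:8100
print(normalize_name({"name": "test_token"}, "splunktcptoken"))
# -> splunktcptoken://test_token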
diff --git a/ansible_collections/splunk/es/plugins/httpapi/splunk.py b/ansible_collections/splunk/es/plugins/httpapi/splunk.py
index 91f079e06..095a7b71c 100644
--- a/ansible_collections/splunk/es/plugins/httpapi/splunk.py
+++ b/ansible_collections/splunk/es/plugins/httpapi/splunk.py
@@ -3,6 +3,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -18,13 +19,12 @@ version_added: "1.0.0"
import json
-from ansible.module_utils.basic import to_text
from ansible.errors import AnsibleConnectionFailure
-from ansible.module_utils.six.moves.urllib.error import HTTPError
-from ansible_collections.ansible.netcommon.plugins.plugin_utils.httpapi_base import (
- HttpApiBase,
-)
+from ansible.module_utils.basic import to_text
from ansible.module_utils.connection import ConnectionError
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.ansible.netcommon.plugins.plugin_utils.httpapi_base import HttpApiBase
+
BASE_HEADERS = {"Content-Type": "application/json"}
@@ -47,7 +47,8 @@ class HttpApi(HttpApiBase):
return response.getcode(), self._response_to_json(value)
except AnsibleConnectionFailure as e:
self.connection.queue_message(
- "vvv", "AnsibleConnectionFailure: %s" % e
+ "vvv",
+ "AnsibleConnectionFailure: %s" % e,
)
if to_text("Could not connect to") in to_text(e):
raise
@@ -62,8 +63,7 @@ class HttpApi(HttpApiBase):
def _display_request(self, request_method, path):
self.connection.queue_message(
"vvvv",
- "Web Services: %s %s/%s"
- % (request_method, self.connection._url, path),
+ "Web Services: %s %s/%s" % (request_method, self.connection._url, path),
)
def _get_response_value(self, response_data):
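
Behaviorally, the send_request error path above logs every AnsibleConnectionFailure at -vvv and re-raises only when the message indicates Splunk was truly unreachable; other failures fall through to the JSON error handling. Reduced to its branch, with a plain exception and list standing in for the Ansible exception and message queue:

def handle_connection_failure(exc, queued_messages):
    # Log at -vvv, then re-raise only genuine connectivity failures.
    queued_messages.append(("vvv", "AnsibleConnectionFailure: %s" % exc))
    if "Could not connect to" in str(exc):
        raise exc

messages = []
handle_connection_failure(RuntimeError("HTTP 404: Object not found"), messages)
print(messages[-1][1])
# -> AnsibleConnectionFailure: HTTP 404: Object not found (swallowed, not re-raised)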
diff --git a/ansible_collections/splunk/es/plugins/module_utils/splunk.py b/ansible_collections/splunk/es/plugins/module_utils/splunk.py
index 240481d3a..eb5ed2755 100644
--- a/ansible_collections/splunk/es/plugins/module_utils/splunk.py
+++ b/ansible_collections/splunk/es/plugins/module_utils/splunk.py
@@ -5,16 +5,17 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
+try:
+ from ssl import CertificateError
+except ImportError:
+ from backports.ssl_match_hostname import CertificateError
-from ansible.module_utils.urls import CertificateError
-from ansible.module_utils.six.moves.urllib.parse import urlencode
-from ansible.module_utils.connection import (
- ConnectionError,
- Connection,
-)
from ansible.module_utils._text import to_text
+from ansible.module_utils.connection import Connection, ConnectionError
from ansible.module_utils.six import iteritems
+from ansible.module_utils.six.moves.urllib.parse import urlencode
def parse_splunk_args(module):
@@ -39,8 +40,8 @@ def parse_splunk_args(module):
except TypeError as e:
module.fail_json(
msg="Invalid data type provided for splunk module_util.parse_splunk_args: {0}".format(
- e
- )
+ e,
+ ),
)
@@ -62,9 +63,7 @@ def map_params_to_obj(module_params, key_transform):
obj = {}
for k, v in iteritems(key_transform):
if k in module_params and (
- module_params.get(k)
- or module_params.get(k) == 0
- or module_params.get(k) is False
+ module_params.get(k) or module_params.get(k) == 0 or module_params.get(k) is False
):
obj[v] = module_params.pop(k)
return obj
@@ -152,19 +151,22 @@ class SplunkRequest(object):
def _httpapi_error_handle(self, method, uri, payload=None):
try:
code, response = self.connection.send_request(
- method, uri, payload=payload
+ method,
+ uri,
+ payload=payload,
)
if code == 404:
if to_text("Object not found") in to_text(response) or to_text(
- "Could not find object"
+ "Could not find object",
) in to_text(response):
return {}
if not (code >= 200 and code < 300):
self.module.fail_json(
msg="Splunk httpapi returned error {0} with message {1}".format(
- code, response
+ code,
+ response,
),
)
@@ -181,7 +183,7 @@ class SplunkRequest(object):
except ValueError as e:
try:
self.module.fail_json(
- msg="certificate not found: {0}".format(e)
+ msg="certificate not found: {0}".format(e),
)
except AttributeError:
pass
@@ -211,9 +213,7 @@ class SplunkRequest(object):
if self.legacy and not config:
config = self.module.params
for param in config:
- if (config[param]) is not None and (
- param not in self.not_rest_data_keys
- ):
+ if (config[param]) is not None and (param not in self.not_rest_data_keys):
if param in self.keymap:
splunk_data[self.keymap[param]] = config[param]
else:
@@ -223,7 +223,7 @@ class SplunkRequest(object):
except TypeError as e:
self.module.fail_json(
- msg="invalid data type provided: {0}".format(e)
+ msg="invalid data type provided: {0}".format(e),
)
def get_urlencoded_data(self, config):
@@ -252,5 +252,6 @@ class SplunkRequest(object):
if data is not None and self.override:
data = self.get_urlencoded_data(data)
return self.post(
- "/{0}?output_mode=json".format(rest_path), payload=data
+ "/{0}?output_mode=json".format(rest_path),
+ payload=data,
)
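
The map_params_to_obj helper touched above keeps falsy-but-meaningful values: its condition retains 0 and False while dropping values that are merely unset, and key_transform maps module option names to Splunk REST field names. A sketch of the same retention rule, with a hypothetical two-entry transform table:

def map_params_to_obj(module_params, key_transform):
    # Same retention rule as the helper above: keep 0 and False,
    # drop values that are merely unset.
    obj = {}
    for k, v in key_transform.items():
        if k in module_params and (
            module_params.get(k) or module_params.get(k) == 0 or module_params.get(k) is False
        ):
            obj[v] = module_params[k]
    return obj

# Hypothetical mapping for illustration only.
key_transform = {"check_index": "check-index", "crc_salt": "crcSalt"}
print(map_params_to_obj({"check_index": False, "crc_salt": "<SOURCE>"}, key_transform))
# -> {'check-index': False, 'crcSalt': '<SOURCE>'}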
diff --git a/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py b/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py
index 29099424e..0947c80fc 100644
--- a/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py
+++ b/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -49,7 +50,7 @@ options:
description:
- Splunk Security Domain
type: str
- required: False
+ required: false
choices:
- "access"
- "endpoint"
@@ -62,7 +63,7 @@ options:
description:
- Severity rating
type: str
- required: False
+ required: false
choices:
- "informational"
- "low"
@@ -75,12 +76,12 @@ options:
description:
- Default owner of the notable event; if unset, it will default to Splunk System Defaults
type: str
- required: False
+ required: false
default_status:
description:
- Default status of the notable event; if unset, it will default to Splunk System Defaults
type: str
- required: False
+ required: false
choices:
- "unassigned"
- "new"
@@ -92,19 +93,19 @@ options:
description:
- Name for drill down search. Supports variable substitution with fields from the matching event.
type: str
- required: False
+ required: false
drill_down_search:
description:
- Drill down search. Supports variable substitution with fields from the matching event.
type: str
- required: False
+ required: false
drill_down_earliest_offset:
description:
- Set the amount of time before the triggering event to search for related
events. For example, 2h. Use \"$info_min_time$\" to set the drill-down time
to match the earliest time of the search
type: str
- required: False
+ required: false
default: \"$info_min_time$\"
drill_down_latest_offset:
description:
@@ -112,20 +113,21 @@ options:
events. For example, 1m. Use \"$info_max_time$\" to set the drill-down
time to match the latest time of the search
type: str
- required: False
+ required: false
default: \"$info_max_time$\"
investigation_profiles:
description:
- Investigation profile to associate the notable event with.
type: str
- required: False
+ required: false
next_steps:
description:
- List of adaptive responses that should be run next
- Describe next steps and response actions that an analyst could take to address this threat.
type: list
elements: str
- required: False
+ required: false
+ default: []
recommended_actions:
description:
- List of adaptive responses that are recommended to be run next
@@ -134,7 +136,8 @@ options:
making it easier to find them among the longer list of available actions.
type: list
elements: str
- required: False
+ required: false
+ default: []
asset_extraction:
description:
- List of assets to extract; select any one or many of the available choices
@@ -151,7 +154,7 @@ options:
- dest
- dvc
- orig_host
- required: False
+ required: false
identity_extraction:
description:
- List of identity fields to extract; select any one or many of the available choices
@@ -164,11 +167,10 @@ options:
default:
- user
- src_user
- required: False
-
+ required: false
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
-# FIXME - adaptive response action association is probaby going to need to be a separate module we stitch together in a role
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
EXAMPLES = """
- name: Example of using splunk.es.adaptive_response_notable_event module
@@ -187,19 +189,15 @@ EXAMPLES = """
import json
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
-from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.six.moves.urllib.parse import quote_plus, urlencode
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(
name=dict(required=True, type="str"),
correlation_search_name=dict(required=True, type="str"),
@@ -244,17 +242,22 @@ def main():
drill_down_name=dict(required=False, type="str"),
drill_down_search=dict(required=False, type="str"),
drill_down_earliest_offset=dict(
- required=False, type="str", default="$info_min_time$"
+ required=False,
+ type="str",
+ default="$info_min_time$",
),
drill_down_latest_offset=dict(
- required=False, type="str", default="$info_max_time$"
+ required=False,
+ type="str",
+ default="$info_max_time$",
),
investigation_profiles=dict(required=False, type="str"),
- next_steps=dict(
- required=False, type="list", elements="str", default=[]
- ),
+ next_steps=dict(required=False, type="list", elements="str", default=[]),
recommended_actions=dict(
- required=False, type="list", elements="str", default=[]
+ required=False,
+ type="list",
+ elements="str",
+ default=[],
),
asset_extraction=dict(
required=False,
@@ -283,8 +286,8 @@ def main():
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["correlation_search_name"])
- )
+ quote_plus(module.params["correlation_search_name"]),
+ ),
)
# Have to custom craft the data here because they overload the saved searches
@@ -297,9 +300,7 @@ def main():
# request_post_data['action.notable.param.extract_identities'] = [\"src_user\",\"user\"]
if module.params["next_steps"]:
if len(module.params["next_steps"]) == 1:
- next_steps = "[[action|{0}]]".format(
- module.params["next_steps"][0]
- )
+ next_steps = "[[action|{0}]]".format(module.params["next_steps"][0])
else:
next_steps = ""
for next_step in module.params["next_steps"]:
@@ -312,66 +313,48 @@ def main():
# but I don't know what it is/means because there's no docs on it
next_steps_dict = {"version": 1, "data": next_steps}
request_post_data["action.notable.param.next_steps"] = json.dumps(
- next_steps_dict
+ next_steps_dict,
)
if module.params["recommended_actions"]:
if len(module.params["recommended_actions"]) == 1:
- request_post_data[
- "action.notable.param.recommended_actions"
- ] = module.params["recommended_actions"][0]
+ request_post_data["action.notable.param.recommended_actions"] = module.params[
+ "recommended_actions"
+ ][0]
else:
- request_post_data[
- "action.notable.param.recommended_actions"
- ] = ",".join(module.params["recommended_actions"])
+ request_post_data["action.notable.param.recommended_actions"] = ",".join(
+ module.params["recommended_actions"],
+ )
- request_post_data["action.notable.param.rule_description"] = module.params[
- "description"
- ]
- request_post_data["action.notable.param.rule_title"] = module.params[
- "name"
- ]
- request_post_data["action.notable.param.security_domain"] = module.params[
- "security_domain"
- ]
- request_post_data["action.notable.param.severity"] = module.params[
- "severity"
+ request_post_data["action.notable.param.rule_description"] = module.params["description"]
+ request_post_data["action.notable.param.rule_title"] = module.params["name"]
+ request_post_data["action.notable.param.security_domain"] = module.params["security_domain"]
+ request_post_data["action.notable.param.severity"] = module.params["severity"]
+ request_post_data["action.notable.param.asset_extraction"] = module.params["asset_extraction"]
+ request_post_data["action.notable.param.identity_extraction"] = module.params[
+ "identity_extraction"
]
- request_post_data["action.notable.param.asset_extraction"] = module.params[
- "asset_extraction"
- ]
- request_post_data[
- "action.notable.param.identity_extraction"
- ] = module.params["identity_extraction"]
# NOTE: this field appears to be hard coded when you create this via the splunk web UI
# but I don't know what it is/means because there's no docs on it
request_post_data["action.notable.param.verbose"] = "0"
if module.params["default_owner"]:
- request_post_data[
- "action.notable.param.default_owner"
- ] = module.params["default_owner"]
+ request_post_data["action.notable.param.default_owner"] = module.params["default_owner"]
if module.params["default_status"]:
- request_post_data[
- "action.notable.param.default_status"
- ] = module.params["default_status"]
+ request_post_data["action.notable.param.default_status"] = module.params["default_status"]
request_post_data = utils.remove_empties(request_post_data)
if query_dict:
- request_post_data["search"] = query_dict["entry"][0]["content"][
- "search"
- ]
+ request_post_data["search"] = query_dict["entry"][0]["content"]["search"]
if "actions" in query_dict["entry"][0]["content"]:
if query_dict["entry"][0]["content"]["actions"] == "notable":
pass
elif (
- len(query_dict["entry"][0]["content"]["actions"].split(","))
- > 0
- and "notable"
- not in query_dict["entry"][0]["content"]["actions"]
+ len(query_dict["entry"][0]["content"]["actions"].split(",")) > 0
+ and "notable" not in query_dict["entry"][0]["content"]["actions"]
):
request_post_data["actions"] = (
query_dict["entry"][0]["content"]["actions"] + ", notable"
@@ -389,12 +372,14 @@ def main():
for arg in request_post_data:
if arg in query_dict["entry"][0]["content"]:
if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
- request_post_data[arg]
+ request_post_data[arg],
):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -405,15 +390,13 @@ def main():
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["correlation_search_name"])
+ quote_plus(module.params["correlation_search_name"]),
),
data=urlencode(request_post_data),
)
module.exit_json(
changed=True,
- msg="{0} updated.".format(
- module.params["correlation_search_name"]
- ),
+ msg="{0} updated.".format(module.params["correlation_search_name"]),
splunk_data=splunk_data,
)
@@ -430,7 +413,9 @@ def main():
del query_dict["entry"][0]["content"][arg]
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -441,21 +426,17 @@ def main():
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["correlation_search_name"])
+ quote_plus(module.params["correlation_search_name"]),
),
data=urlencode(request_post_data),
)
module.exit_json(
changed=True,
- msg="{0} updated.".format(
- module.params["correlation_search_name"]
- ),
+ msg="{0} updated.".format(module.params["correlation_search_name"]),
splunk_data=splunk_data,
)
- module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
- )
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
if __name__ == "__main__":
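
For the next_steps payload assembled above: each step is wrapped as [[action|<name>]] and embedded in the hard-coded {"version": 1} envelope before json.dumps. A worked example; how multiple steps are joined is cut off in this hunk, so the separator used here is an assumption:

import json

def build_next_steps(steps):
    # Separator between multiple [[action|...]] entries is assumed.
    body = "".join("[[action|{0}]]".format(step) for step in steps)
    return json.dumps({"version": 1, "data": body})

print(build_next_steps(["makestreams"]))
# -> {"version": 1, "data": "[[action|makestreams]]"}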
diff --git a/ansible_collections/splunk/es/plugins/modules/correlation_search.py b/ansible_collections/splunk/es/plugins/modules/correlation_search.py
index 9c865507b..1664c8c8b 100644
--- a/ansible_collections/splunk/es/plugins/modules/correlation_search.py
+++ b/ansible_collections/splunk/es/plugins/modules/correlation_search.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -25,29 +26,29 @@ options:
name:
description:
- Name of correlation search
- required: True
+ required: true
type: str
description:
description:
- Description of the correlation search; this will populate the description field for the web console
- required: True
+ required: true
type: str
state:
description:
- Add, remove, enable, or disable a correlation search.
- required: True
+ required: true
choices: [ "present", "absent", "enabled", "disabled" ]
type: str
search:
description:
- SPL search string
type: str
- required: True
+ required: true
app:
description:
- Splunk app to associate the correlation search with
type: str
- required: False
+ required: false
default: "SplunkEnterpriseSecuritySuite"
ui_dispatch_context:
description:
@@ -55,18 +56,18 @@ options:
event or links in an email adaptive response action. If None, uses the
Application Context.
type: str
- required: False
+ required: false
time_earliest:
description:
- Earliest time using relative time modifiers.
type: str
- required: False
+ required: false
default: "-24h"
time_latest:
description:
- Latest time using relative time modifiers.
type: str
- required: False
+ required: false
default: "now"
cron_schedule:
description:
@@ -74,7 +75,7 @@ options:
- For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM).
- Real-time searches use a default schedule of C('*/5 * * * *').
type: str
- required: False
+ required: false
default: "*/5 * * * *"
scheduling:
description:
@@ -83,7 +84,7 @@ options:
Learn more:
https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling
type: str
- required: False
+ required: false
default: "real-time"
choices:
- "real-time"
@@ -94,7 +95,7 @@ options:
to improve efficiency when there are many concurrently scheduled reports.
The "auto" setting automatically determines the best window width for the report.
type: str
- required: False
+ required: false
default: "0"
schedule_priority:
description:
@@ -102,7 +103,7 @@ options:
it above other searches of the same scheduling mode, or "Highest" to
prioritize it above other searches regardless of mode. Use with discretion.
type: str
- required: False
+ required: false
default: "Default"
choices:
- "Default"
@@ -114,7 +115,7 @@ options:
it above other searches of the same scheduling mode, or "Highest" to
prioritize it above other searches regardless of mode. Use with discretion.
type: str
- required: False
+ required: false
default: "number of events"
choices:
- "number of events"
@@ -125,7 +126,7 @@ options:
description:
- Conditional to pass to C(trigger_alert_when)
type: str
- required: False
+ required: false
default: "greater than"
choices:
- "greater than"
@@ -138,24 +139,24 @@ options:
description:
- Value to pass to C(trigger_alert_when)
type: str
- required: False
+ required: false
default: "10"
throttle_window_duration:
description:
- "How much time to ignore other events that match the field values specified in Fields to group by."
type: str
- required: False
+ required: false
throttle_fields_to_group_by:
description:
- "Type the fields to consider for matching events for throttling."
type: str
- required: False
+ required: false
suppress_alerts:
description:
- "To suppress alerts from this correlation search or not"
type: bool
- required: False
- default: False
+ required: false
+ default: false
notes:
- >
The following options are not yet supported:
@@ -174,30 +175,22 @@ EXAMPLES = """
state: "present"
"""
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
-
-from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import HTTPError
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible.module_utils.six.moves.urllib.parse import quote_plus, urlencode
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
def main():
argspec = dict(
name=dict(required=True, type="str"),
description=dict(required=True, type="str"),
- state=dict(
- choices=["present", "absent", "enabled", "disabled"], required=True
- ),
+ state=dict(choices=["present", "absent", "enabled", "disabled"], required=True),
search=dict(required=True, type="str"),
- app=dict(
- type="str", required=False, default="SplunkEnterpriseSecuritySuite"
- ),
+ app=dict(type="str", required=False, default="SplunkEnterpriseSecuritySuite"),
ui_dispatch_context=dict(type="str", required=False),
time_earliest=dict(type="str", required=False, default="-24h"),
time_latest=dict(type="str", required=False, default="now"),
@@ -239,9 +232,7 @@ def main():
"rises by",
],
),
- trigger_alert_when_value=dict(
- type="str", required=False, default="10"
- ),
+ trigger_alert_when_value=dict(type="str", required=False, default="10"),
throttle_window_duration=dict(type="str", required=False),
throttle_fields_to_group_by=dict(type="str", required=False),
suppress_alerts=dict(type="bool", required=False, default=False),
@@ -264,8 +255,8 @@ def main():
try:
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
except HTTPError as e:
# the data monitor doesn't exist
@@ -283,12 +274,8 @@ def main():
request_post_data["search"] = module.params["search"]
request_post_data["request.ui_dispatch_app"] = module.params["app"]
if module.params["ui_dispatch_context"]:
- request_post_data["request.ui_dispatch_context"] = module.params[
- "ui_dispatch_context"
- ]
- request_post_data["dispatch.earliest_time"] = module.params[
- "time_earliest"
- ]
+ request_post_data["request.ui_dispatch_context"] = module.params["ui_dispatch_context"]
+ request_post_data["dispatch.earliest_time"] = module.params["time_earliest"]
request_post_data["dispatch.latest_time"] = module.params["time_latest"]
request_post_data["cron_schedule"] = module.params["cron_schedule"]
if module.params["scheduling"] == "real-time":
@@ -296,16 +283,10 @@ def main():
else:
request_post_data["realtime_schedule"] = False
request_post_data["schedule_window"] = module.params["schedule_window"]
- request_post_data["schedule_priority"] = module.params[
- "schedule_priority"
- ].lower()
+ request_post_data["schedule_priority"] = module.params["schedule_priority"].lower()
request_post_data["alert_type"] = module.params["trigger_alert_when"]
- request_post_data["alert_comparator"] = module.params[
- "trigger_alert_when_condition"
- ]
- request_post_data["alert_threshold"] = module.params[
- "trigger_alert_when_value"
- ]
+ request_post_data["alert_comparator"] = module.params["trigger_alert_when_condition"]
+ request_post_data["alert_threshold"] = module.params["trigger_alert_when_value"]
request_post_data["alert.suppress"] = module.params["suppress_alerts"]
request_post_data["disabled"] = module_disabled_state
@@ -316,13 +297,15 @@ def main():
needs_change = False
for arg in request_post_data:
if arg in query_dict["entry"][0]["content"]:
- if to_text(
- query_dict["entry"][0]["content"][arg]
- ) != to_text(request_post_data[arg]):
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_post_data[arg],
+ ):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -337,12 +320,14 @@ def main():
] # If this is present, splunk assumes we're trying to create a new one with the same name
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["name"])
+ quote_plus(module.params["name"]),
),
data=urlencode(request_post_data),
)
module.exit_json(
- changed=True, msg="{0} updated.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} updated.",
+ splunk_data=splunk_data,
)
else:
# Create it
@@ -350,16 +335,12 @@ def main():
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches",
data=urlencode(request_post_data),
)
- module.exit_json(
- changed=True, msg="{0} created.", splunk_data=splunk_data
- )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
elif module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
- "services/saved/searches/{0}".format(
- quote_plus(module.params["name"])
- )
+ "services/saved/searches/{0}".format(quote_plus(module.params["name"])),
)
module.exit_json(
changed=True,
@@ -367,9 +348,7 @@ def main():
splunk_data=splunk_data,
)
- module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
- )
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
if __name__ == "__main__":
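
The idempotency check reshaped above compares every requested field against the saved search's current content as text, so numeric and string forms of the same value ("10" vs 10) do not trigger an update. Reduced to a sketch; to_text here is a simplified stand-in for ansible.module_utils._text.to_text:

def to_text(value):
    # Stand-in: the real helper handles bytes/unicode conversion too.
    return value if isinstance(value, str) else str(value)

content = {"alert_threshold": "10", "disabled": "0"}
request_post_data = {"alert_threshold": 10, "disabled": "1"}

needs_change = False
for arg in request_post_data:
    if arg in content and to_text(content[arg]) != to_text(request_post_data[arg]):
        needs_change = True
print(needs_change)
# -> True ("disabled" differs; "10" vs 10 compares equal as text)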
diff --git a/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py b/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py
index 0ab756989..ecb36ce66 100644
--- a/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py
+++ b/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -40,15 +41,13 @@ EXAMPLES = """
"""
from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible.module_utils.six.moves.urllib.error import HTTPError
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible.module_utils.six.moves.urllib.parse import quote_plus
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(name=dict(required=False, type="str"))
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
@@ -62,15 +61,15 @@ def main():
try:
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
except HTTPError as e:
# the data monitor doesn't exist
query_dict = {}
else:
query_dict = splunk_request.get_by_path(
- "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches",
)
module.exit_json(changed=False, splunk_correlation_search_info=query_dict)
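
The info module above therefore takes one of two paths: a single saved search when name is given (URL-quoted with quote_plus), otherwise the full listing. The branch in isolation, with a sample search name:

from urllib.parse import quote_plus

BASE = "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"

def info_path(name=None):
    # One specific saved search if named, else the whole collection.
    return "{0}/{1}".format(BASE, quote_plus(name)) if name else BASE

print(info_path("Access - Brute Force Access Behavior Detected - Rule"))
# -> .../saved/searches/Access+-+Brute+Force+Access+Behavior+Detected+-+Rule
print(info_path())
# -> .../saved/searches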
diff --git a/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py b/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py
index 080d23d3b..b0108d74a 100644
--- a/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py
+++ b/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -25,12 +26,12 @@ options:
name:
description:
- The file or directory path to monitor on the system.
- required: True
+ required: true
type: str
state:
description:
- Add or remove a data source.
- required: True
+ required: true
choices:
- "present"
- "absent"
@@ -38,41 +39,41 @@ options:
blacklist:
description:
- Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.
- required: False
+ required: false
type: str
check_index:
description:
- - If set to C(True), the index value is checked to ensure that it is the name of a valid index.
- required: False
+ - If set to C(true), the index value is checked to ensure that it is the name of a valid index.
+ required: false
type: bool
- default: False
+ default: false
check_path:
description:
- - If set to C(True), the name value is checked to ensure that it exists.
- required: False
+ - If set to C(true), the name value is checked to ensure that it exists.
+ required: false
type: bool
crc_salt:
description:
- A string that modifies the file tracking identity for files in this input.
The magic value <SOURCE> invokes special behavior (see admin documentation).
- required: False
+ required: false
type: str
disabled:
description:
- Indicates if input monitoring is disabled.
- required: False
- default: False
+ required: false
+ default: false
type: bool
followTail:
description:
- - If set to C(True), files that are seen for the first time is read from the end.
- required: False
+ - If set to C(true), files that are seen for the first time are read from the end.
+ required: false
type: bool
- default: False
+ default: false
host:
description:
- The value to populate in the host field for events from this data input.
- required: False
+ required: false
type: str
host_regex:
description:
@@ -80,40 +81,40 @@ options:
matches this regular expression, the captured value is used to populate
the host field for events from this data input. The regular expression
must have one capture group.
- required: False
+ required: false
type: str
host_segment:
description:
- Use the specified slash-separated segment of the filepath as the host field value.
- required: False
+ required: false
type: int
ignore_older_than:
description:
- Specify a time value. If the modification time of a file being monitored
falls outside of this rolling time window, the file is no longer being monitored.
- required: False
+ required: false
type: str
index:
description:
- Which index events from this input should be stored in. Defaults to default.
- required: False
+ required: false
type: str
recursive:
description:
- - Setting this to False prevents monitoring of any subdirectories encountered within this data input.
- required: False
+ - Setting this to false prevents monitoring of any subdirectories encountered within this data input.
+ required: false
type: bool
- default: False
+ default: false
rename_source:
description:
- The value to populate in the source field for events from this data input.
The same source should not be used for multiple data inputs.
- required: False
+ required: false
type: str
sourcetype:
description:
- The value to populate in the sourcetype field for incoming events.
- required: False
+ required: false
type: str
time_before_close:
description:
@@ -121,12 +122,12 @@ options:
file is kept open for a minimum of the number of seconds specified in
this value. After this period has elapsed, the file is checked again for
more data.
- required: False
+ required: false
type: int
whitelist:
description:
- Specify a regular expression for a file path. Only file paths that match this regular expression are indexed.
- required: False
+ required: false
type: str
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
@@ -136,22 +137,18 @@ EXAMPLES = """
splunk.es.data_input_monitor:
name: "/var/log/example.log"
state: "present"
- recursive: True
+ recursive: true
"""
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
+from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import quote_plus
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(
name=dict(required=True, type="str"),
state=dict(choices=["present", "absent"], required=True),
@@ -197,8 +194,8 @@ def main():
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
query_dict = utils.remove_empties(query_dict)
@@ -207,13 +204,15 @@ def main():
needs_change = False
for arg in request_data:
if arg in query_dict["entry"][0]["content"]:
- if to_text(
- query_dict["entry"][0]["content"][arg]
- ) != to_text(request_data[arg]):
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_data[arg],
+ ):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -224,11 +223,13 @@ def main():
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
module.exit_json(
- changed=True, msg="{0} updated.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} updated.",
+ splunk_data=splunk_data,
)
else:
# Create it
@@ -238,16 +239,14 @@ def main():
"servicesNS/nobody/search/data/inputs/monitor",
data=_data,
)
- module.exit_json(
- changed=True, msg="{0} created.", splunk_data=splunk_data
- )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
if module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
module.exit_json(
changed=True,
@@ -255,9 +254,7 @@ def main():
splunk_data=splunk_data,
)
- module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
- )
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
if __name__ == "__main__":
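
The create branch above posts the module params form-encoded, as with the other modules in this patch. A minimal illustration of what that request body looks like, with sample values invented:

from urllib.parse import urlencode

request_data = {"name": "/var/log/example.log", "recursive": True, "index": "main"}
print(urlencode(request_data))
# -> name=%2Fvar%2Flog%2Fexample.log&recursive=True&index=main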
diff --git a/ansible_collections/splunk/es/plugins/modules/data_input_network.py b/ansible_collections/splunk/es/plugins/modules/data_input_network.py
index 5771eb9cc..14905563a 100644
--- a/ansible_collections/splunk/es/plugins/modules/data_input_network.py
+++ b/ansible_collections/splunk/es/plugins/modules/data_input_network.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -25,7 +26,7 @@ options:
protocol:
description:
- Choose between tcp and udp
- required: True
+ required: true
choices:
- 'tcp'
- 'udp'
@@ -37,7 +38,7 @@ options:
- C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data.
- C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
default: "ip"
- required: False
+ required: false
type: str
choices:
- "ip"
@@ -51,7 +52,7 @@ options:
- "absent"
- "enabled"
- "disable"
- required: False
+ required: false
default: "present"
type: str
datatype:
@@ -62,12 +63,12 @@ options:
- "cooked"
- "raw"
default: "raw"
- required: False
+ required: false
type: str
host:
description:
- Host from which the indexer gets data.
- required: False
+ required: false
type: str
index:
description:
@@ -76,7 +77,7 @@ options:
name:
description:
- The input port which receives raw data.
- required: True
+ required: true
type: str
queue:
description:
@@ -89,7 +90,7 @@ options:
- "parsingQueue"
- "indexQueue"
type: str
- required: False
+ required: false
default: "parsingQueue"
rawTcpDoneTimeout:
description:
@@ -98,16 +99,16 @@ options:
number of seconds, it adds a Done-key. This implies the last event is completely received.
default: 10
type: int
- required: False
+ required: false
restrictToHost:
description:
- Allows for restricting this input to only accept data from the host specified here.
- required: False
+ required: false
type: str
ssl:
description:
- Enable or disable ssl for the data stream
- required: False
+ required: false
type: bool
source:
description:
@@ -126,7 +127,7 @@ options:
description:
- Set the source type for events from this input.
- '"sourcetype=" is automatically prepended to <string>.'
- - Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).
+ - Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).
type: str
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
@@ -140,16 +141,14 @@ EXAMPLES = """
"""
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
+from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import quote_plus
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(
state=dict(
required=False,
@@ -178,9 +177,7 @@ def main():
ssl=dict(required=False, type="bool", default=None),
source=dict(required=False, type="str", default=None),
sourcetype=dict(required=False, type="str", default=None),
- datatype=dict(
- required=False, choices=["cooked", "raw"], default="raw"
- ),
+ datatype=dict(required=False, choices=["cooked", "raw"], default="raw"),
)
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
@@ -198,7 +195,7 @@ def main():
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
quote_plus(module.params["name"]),
- )
+ ),
)
if module.params["state"] in ["present", "enabled", "disabled"]:
@@ -211,13 +208,15 @@ def main():
needs_change = False
for arg in request_data:
if arg in query_dict["entry"][0]["content"]:
- if to_text(
- query_dict["entry"][0]["content"][arg]
- ) != to_text(request_data[arg]):
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_data[arg],
+ ):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -236,11 +235,15 @@ def main():
)
if module.params["state"] in ["present", "enabled"]:
module.exit_json(
- changed=True, msg="{0} updated.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} updated.",
+ splunk_data=splunk_data,
)
else:
module.exit_json(
- changed=True, msg="{0} disabled.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} disabled.",
+ splunk_data=splunk_data,
)
else:
# Create it
@@ -251,9 +254,7 @@ def main():
),
data=_data,
)
- module.exit_json(
- changed=True, msg="{0} created.", splunk_data=splunk_data
- )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
elif module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
@@ -261,7 +262,7 @@ def main():
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
quote_plus(module.params["name"]),
- )
+ ),
)
module.exit_json(
changed=True,
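The network module is keyed by protocol, datatype, and port, which is why the lookup and delete paths above interpolate all three into the URL. A minimal sketch, assuming the port and protocol values used by this patch's integration tests:

- name: Listen for raw events on a TCP port
  splunk.es.data_input_network:
    name: "8099"  # input port; value borrowed from the integration tests
    protocol: tcp
    state: present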
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py
index 29099424e..0947c80fc 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -49,7 +50,7 @@ options:
description:
- Splunk Security Domain
type: str
- required: False
+ required: false
choices:
- "access"
- "endpoint"
@@ -62,7 +63,7 @@ options:
description:
- Severity rating
type: str
- required: False
+ required: false
choices:
- "informational"
- "low"
@@ -75,12 +76,12 @@ options:
description:
- Default owner of the notable event, if unset it will default to Splunk System Defaults
type: str
- required: False
+ required: false
default_status:
description:
- Default status of the notable event, if unset it will default to Splunk System Defaults
type: str
- required: False
+ required: false
choices:
- "unassigned"
- "new"
@@ -92,19 +93,19 @@ options:
description:
- Name for drill down search. Supports variable substitution with fields from the matching event.
type: str
- required: False
+ required: false
drill_down_search:
description:
- Drill down search. Supports variable substitution with fields from the matching event.
type: str
- required: False
+ required: false
drill_down_earliest_offset:
description:
- Set the amount of time before the triggering event to search for related
events. For example, 2h. Use \"$info_min_time$\" to set the drill-down time
to match the earliest time of the search
type: str
- required: False
+ required: false
default: \"$info_min_time$\"
drill_down_latest_offset:
description:
@@ -112,20 +113,21 @@ options:
events. For example, 1m. Use \"$info_max_time$\" to set the drill-down
time to match the latest time of the search
type: str
- required: False
+ required: false
default: \"$info_max_time$\"
investigation_profiles:
description:
- Investigation profile to associate the notable event with.
type: str
- required: False
+ required: false
next_steps:
description:
- List of adaptive responses that should be run next
- Describe next steps and response actions that an analyst could take to address this threat.
type: list
elements: str
- required: False
+ required: false
+ default: []
recommended_actions:
description:
- List of adaptive responses that are recommended to be run next
@@ -134,7 +136,8 @@ options:
making it easier to find them among the longer list of available actions.
type: list
elements: str
- required: False
+ required: false
+ default: []
asset_extraction:
description:
- List of assets to extract; select any one or many of the available choices
@@ -151,7 +154,7 @@ options:
- dest
- dvc
- orig_host
- required: False
+ required: false
identity_extraction:
description:
- List of identity fields to extract; select any one or many of the available choices
@@ -164,11 +167,10 @@ options:
default:
- user
- src_user
- required: False
-
+ required: false
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
-# FIXME - adaptive response action association is probaby going to need to be a separate module we stitch together in a role
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
EXAMPLES = """
- name: Example of using splunk.es.adaptive_response_notable_event module
@@ -187,19 +189,15 @@ EXAMPLES = """
import json
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
-from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.six.moves.urllib.parse import quote_plus, urlencode
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(
name=dict(required=True, type="str"),
correlation_search_name=dict(required=True, type="str"),
@@ -244,17 +242,22 @@ def main():
drill_down_name=dict(required=False, type="str"),
drill_down_search=dict(required=False, type="str"),
drill_down_earliest_offset=dict(
- required=False, type="str", default="$info_min_time$"
+ required=False,
+ type="str",
+ default="$info_min_time$",
),
drill_down_latest_offset=dict(
- required=False, type="str", default="$info_max_time$"
+ required=False,
+ type="str",
+ default="$info_max_time$",
),
investigation_profiles=dict(required=False, type="str"),
- next_steps=dict(
- required=False, type="list", elements="str", default=[]
- ),
+ next_steps=dict(required=False, type="list", elements="str", default=[]),
recommended_actions=dict(
- required=False, type="list", elements="str", default=[]
+ required=False,
+ type="list",
+ elements="str",
+ default=[],
),
asset_extraction=dict(
required=False,
@@ -283,8 +286,8 @@ def main():
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["correlation_search_name"])
- )
+ quote_plus(module.params["correlation_search_name"]),
+ ),
)
# Have to custom craft the data here because they overload the saved searches
@@ -297,9 +300,7 @@ def main():
# request_post_data['action.notable.param.extract_identities'] = [\"src_user\",\"user\"]
if module.params["next_steps"]:
if len(module.params["next_steps"]) == 1:
- next_steps = "[[action|{0}]]".format(
- module.params["next_steps"][0]
- )
+ next_steps = "[[action|{0}]]".format(module.params["next_steps"][0])
else:
next_steps = ""
for next_step in module.params["next_steps"]:
@@ -312,66 +313,48 @@ def main():
# but I don't know what it is/means because there's no docs on it
next_steps_dict = {"version": 1, "data": next_steps}
request_post_data["action.notable.param.next_steps"] = json.dumps(
- next_steps_dict
+ next_steps_dict,
)
if module.params["recommended_actions"]:
if len(module.params["recommended_actions"]) == 1:
- request_post_data[
- "action.notable.param.recommended_actions"
- ] = module.params["recommended_actions"][0]
+ request_post_data["action.notable.param.recommended_actions"] = module.params[
+ "recommended_actions"
+ ][0]
else:
- request_post_data[
- "action.notable.param.recommended_actions"
- ] = ",".join(module.params["recommended_actions"])
+ request_post_data["action.notable.param.recommended_actions"] = ",".join(
+ module.params["recommended_actions"],
+ )
- request_post_data["action.notable.param.rule_description"] = module.params[
- "description"
- ]
- request_post_data["action.notable.param.rule_title"] = module.params[
- "name"
- ]
- request_post_data["action.notable.param.security_domain"] = module.params[
- "security_domain"
- ]
- request_post_data["action.notable.param.severity"] = module.params[
- "severity"
+ request_post_data["action.notable.param.rule_description"] = module.params["description"]
+ request_post_data["action.notable.param.rule_title"] = module.params["name"]
+ request_post_data["action.notable.param.security_domain"] = module.params["security_domain"]
+ request_post_data["action.notable.param.severity"] = module.params["severity"]
+ request_post_data["action.notable.param.asset_extraction"] = module.params["asset_extraction"]
+ request_post_data["action.notable.param.identity_extraction"] = module.params[
+ "identity_extraction"
]
- request_post_data["action.notable.param.asset_extraction"] = module.params[
- "asset_extraction"
- ]
- request_post_data[
- "action.notable.param.identity_extraction"
- ] = module.params["identity_extraction"]
# NOTE: this field appears to be hard coded when you create this via the splunk web UI
# but I don't know what it is/means because there's no docs on it
request_post_data["action.notable.param.verbose"] = "0"
if module.params["default_owner"]:
- request_post_data[
- "action.notable.param.default_owner"
- ] = module.params["default_owner"]
+ request_post_data["action.notable.param.default_owner"] = module.params["default_owner"]
if module.params["default_status"]:
- request_post_data[
- "action.notable.param.default_status"
- ] = module.params["default_status"]
+ request_post_data["action.notable.param.default_status"] = module.params["default_status"]
request_post_data = utils.remove_empties(request_post_data)
if query_dict:
- request_post_data["search"] = query_dict["entry"][0]["content"][
- "search"
- ]
+ request_post_data["search"] = query_dict["entry"][0]["content"]["search"]
if "actions" in query_dict["entry"][0]["content"]:
if query_dict["entry"][0]["content"]["actions"] == "notable":
pass
elif (
- len(query_dict["entry"][0]["content"]["actions"].split(","))
- > 0
- and "notable"
- not in query_dict["entry"][0]["content"]["actions"]
+ len(query_dict["entry"][0]["content"]["actions"].split(",")) > 0
+ and "notable" not in query_dict["entry"][0]["content"]["actions"]
):
request_post_data["actions"] = (
query_dict["entry"][0]["content"]["actions"] + ", notable"
@@ -389,12 +372,14 @@ def main():
for arg in request_post_data:
if arg in query_dict["entry"][0]["content"]:
if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
- request_post_data[arg]
+ request_post_data[arg],
):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -405,15 +390,13 @@ def main():
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["correlation_search_name"])
+ quote_plus(module.params["correlation_search_name"]),
),
data=urlencode(request_post_data),
)
module.exit_json(
changed=True,
- msg="{0} updated.".format(
- module.params["correlation_search_name"]
- ),
+ msg="{0} updated.".format(module.params["correlation_search_name"]),
splunk_data=splunk_data,
)
@@ -430,7 +413,9 @@ def main():
del query_dict["entry"][0]["content"][arg]
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -441,21 +426,17 @@ def main():
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["correlation_search_name"])
+ quote_plus(module.params["correlation_search_name"]),
),
data=urlencode(request_post_data),
)
module.exit_json(
changed=True,
- msg="{0} updated.".format(
- module.params["correlation_search_name"]
- ),
+ msg="{0} updated.".format(module.params["correlation_search_name"]),
splunk_data=splunk_data,
)
- module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
- )
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
if __name__ == "__main__":
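Since the notable-event action is stored on the correlation search itself (the module posts back to saved/searches/{correlation_search_name}), an existing search is a prerequisite. A minimal sketch, reusing the fixture names from the integration tests updated later in this patch:

- name: Attach a notable event action to an existing correlation search
  splunk.es.adaptive_response_notable_event:
    name: Fake notable event from playbook
    correlation_search_name: Test Fake Coorelation Search From Playbook
    description: Test Fake notable event from playbook, description edition.
    state: present
    next_steps:  # rendered into the [[action|...]] markup built above
      - ping
      - nslookup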
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py
index fa680a511..2ee6461ae 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py
@@ -6,6 +6,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -255,19 +256,19 @@ EXAMPLES = """
drilldown_earliest_offset: $info_min_time$
drilldown_latest_offset: $info_max_time$
extract_artifacts:
- asset:
- - src
- - dest
- identity:
- - src_user
- - user
- - src_user_id
+ asset:
+ - src
+ - dest
+ identity:
+ - src_user
+ - user
+ - src_user_id
next_steps:
- - makestreams
+ - makestreams
name: ansible_test_notable
recommended_actions:
- - email
- - logevent
+ - email
+ - logevent
security_domain: threat
severity: high
state: merged
@@ -334,19 +335,19 @@ EXAMPLES = """
drilldown_earliest_offset: $info_min_time$
drilldown_latest_offset: $info_max_time$
extract_artifacts:
- asset:
- - src
- - dest
- identity:
- - src_user
- - user
- - src_user_id
+ asset:
+ - src
+ - dest
+ identity:
+ - src_user
+ - user
+ - src_user_id
next_steps:
- - makestreams
+ - makestreams
name: ansible_test_notable
recommended_actions:
- - email
- - logevent
+ - email
+ - logevent
security_domain: threat
severity: high
state: replaced
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py
index 9c865507b..1664c8c8b 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -25,29 +26,29 @@ options:
name:
description:
- Name of correlation search
- required: True
+ required: true
type: str
description:
description:
- Description of the correlation search; this will populate the description field for the web console
- required: True
+ required: true
type: str
state:
description:
- Add, remove, enable, or disable a correlation search.
- required: True
+ required: true
choices: [ "present", "absent", "enabled", "disabled" ]
type: str
search:
description:
- SPL search string
type: str
- required: True
+ required: true
app:
description:
- Splunk app to associate the correlation search with
type: str
- required: False
+ required: false
default: "SplunkEnterpriseSecuritySuite"
ui_dispatch_context:
description:
@@ -55,18 +56,18 @@ options:
event or links in an email adaptive response action. If None, uses the
Application Context.
type: str
- required: False
+ required: false
time_earliest:
description:
- Earliest time using relative time modifiers.
type: str
- required: False
+ required: false
default: "-24h"
time_latest:
description:
- Latest time using relative time modifiers.
type: str
- required: False
+ required: false
default: "now"
cron_schedule:
description:
@@ -74,7 +75,7 @@ options:
- For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM).
- Real-time searches use a default schedule of C('*/5 * * * *').
type: str
- required: False
+ required: false
default: "*/5 * * * *"
scheduling:
description:
@@ -83,7 +84,7 @@ options:
Learn more:
https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling
type: str
- required: False
+ required: false
default: "real-time"
choices:
- "real-time"
@@ -94,7 +95,7 @@ options:
to improve efficiency when there are many concurrently scheduled reports.
The "auto" setting automatically determines the best window width for the report.
type: str
- required: False
+ required: false
default: "0"
schedule_priority:
description:
@@ -102,7 +103,7 @@ options:
it above other searches of the same scheduling mode, or "Highest" to
prioritize it above other searches regardless of mode. Use with discretion.
type: str
- required: False
+ required: false
default: "Default"
choices:
- "Default"
@@ -114,7 +115,7 @@ options:
it above other searches of the same scheduling mode, or "Highest" to
prioritize it above other searches regardless of mode. Use with discretion.
type: str
- required: False
+ required: false
default: "number of events"
choices:
- "number of events"
@@ -125,7 +126,7 @@ options:
description:
- Conditional to pass to C(trigger_alert_when)
type: str
- required: False
+ required: false
default: "greater than"
choices:
- "greater than"
@@ -138,24 +139,24 @@ options:
description:
- Value to pass to C(trigger_alert_when)
type: str
- required: False
+ required: false
default: "10"
throttle_window_duration:
description:
- "How much time to ignore other events that match the field values specified in Fields to group by."
type: str
- required: False
+ required: false
throttle_fields_to_group_by:
description:
- "Type the fields to consider for matching events for throttling."
type: str
- required: False
+ required: false
suppress_alerts:
description:
- "To suppress alerts from this correlation search or not"
type: bool
- required: False
- default: False
+ required: false
+ default: false
notes:
- >
The following options are not yet supported:
@@ -174,30 +175,22 @@ EXAMPLES = """
state: "present"
"""
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
-
-from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import HTTPError
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible.module_utils.six.moves.urllib.parse import quote_plus, urlencode
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
def main():
argspec = dict(
name=dict(required=True, type="str"),
description=dict(required=True, type="str"),
- state=dict(
- choices=["present", "absent", "enabled", "disabled"], required=True
- ),
+ state=dict(choices=["present", "absent", "enabled", "disabled"], required=True),
search=dict(required=True, type="str"),
- app=dict(
- type="str", required=False, default="SplunkEnterpriseSecuritySuite"
- ),
+ app=dict(type="str", required=False, default="SplunkEnterpriseSecuritySuite"),
ui_dispatch_context=dict(type="str", required=False),
time_earliest=dict(type="str", required=False, default="-24h"),
time_latest=dict(type="str", required=False, default="now"),
@@ -239,9 +232,7 @@ def main():
"rises by",
],
),
- trigger_alert_when_value=dict(
- type="str", required=False, default="10"
- ),
+ trigger_alert_when_value=dict(type="str", required=False, default="10"),
throttle_window_duration=dict(type="str", required=False),
throttle_fields_to_group_by=dict(type="str", required=False),
suppress_alerts=dict(type="bool", required=False, default=False),
@@ -264,8 +255,8 @@ def main():
try:
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
except HTTPError as e:
# the data monitor doesn't exist
@@ -283,12 +274,8 @@ def main():
request_post_data["search"] = module.params["search"]
request_post_data["request.ui_dispatch_app"] = module.params["app"]
if module.params["ui_dispatch_context"]:
- request_post_data["request.ui_dispatch_context"] = module.params[
- "ui_dispatch_context"
- ]
- request_post_data["dispatch.earliest_time"] = module.params[
- "time_earliest"
- ]
+ request_post_data["request.ui_dispatch_context"] = module.params["ui_dispatch_context"]
+ request_post_data["dispatch.earliest_time"] = module.params["time_earliest"]
request_post_data["dispatch.latest_time"] = module.params["time_latest"]
request_post_data["cron_schedule"] = module.params["cron_schedule"]
if module.params["scheduling"] == "real-time":
@@ -296,16 +283,10 @@ def main():
else:
request_post_data["realtime_schedule"] = False
request_post_data["schedule_window"] = module.params["schedule_window"]
- request_post_data["schedule_priority"] = module.params[
- "schedule_priority"
- ].lower()
+ request_post_data["schedule_priority"] = module.params["schedule_priority"].lower()
request_post_data["alert_type"] = module.params["trigger_alert_when"]
- request_post_data["alert_comparator"] = module.params[
- "trigger_alert_when_condition"
- ]
- request_post_data["alert_threshold"] = module.params[
- "trigger_alert_when_value"
- ]
+ request_post_data["alert_comparator"] = module.params["trigger_alert_when_condition"]
+ request_post_data["alert_threshold"] = module.params["trigger_alert_when_value"]
request_post_data["alert.suppress"] = module.params["suppress_alerts"]
request_post_data["disabled"] = module_disabled_state
@@ -316,13 +297,15 @@ def main():
needs_change = False
for arg in request_post_data:
if arg in query_dict["entry"][0]["content"]:
- if to_text(
- query_dict["entry"][0]["content"][arg]
- ) != to_text(request_post_data[arg]):
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_post_data[arg],
+ ):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -337,12 +320,14 @@ def main():
] # If this is present, splunk assumes we're trying to create a new one with the same name
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["name"])
+ quote_plus(module.params["name"]),
),
data=urlencode(request_post_data),
)
module.exit_json(
- changed=True, msg="{0} updated.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} updated.",
+ splunk_data=splunk_data,
)
else:
# Create it
@@ -350,16 +335,12 @@ def main():
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches",
data=urlencode(request_post_data),
)
- module.exit_json(
- changed=True, msg="{0} created.", splunk_data=splunk_data
- )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
elif module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
- "services/saved/searches/{0}".format(
- quote_plus(module.params["name"])
- )
+ "services/saved/searches/{0}".format(quote_plus(module.params["name"])),
)
module.exit_json(
changed=True,
@@ -367,9 +348,7 @@ def main():
splunk_data=splunk_data,
)
- module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
- )
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
if __name__ == "__main__":
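The legacy correlation_search module drives the whole create/update/delete split above from state alone. A minimal sketch, using the same fixture the integration tests create:

- name: Create a correlation search
  splunk.es.correlation_search:
    name: Test Fake Coorelation Search From Playbook
    description: Test Fake Coorelation Search From Playbook, description edition.
    search: source="/var/log/snort.log"  # SPL search string
    state: present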
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py
index 0ab756989..ecb36ce66 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -40,15 +41,13 @@ EXAMPLES = """
"""
from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible.module_utils.six.moves.urllib.error import HTTPError
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible.module_utils.six.moves.urllib.parse import quote_plus
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(name=dict(required=False, type="str"))
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
@@ -62,15 +61,15 @@ def main():
try:
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
except HTTPError as e:
# the data monitor doesn't exist
query_dict = {}
else:
query_dict = splunk_request.get_by_path(
- "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches",
)
module.exit_json(changed=False, splunk_correlation_search_info=query_dict)
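As the reworked lookup shows, name is optional here: with it the module fetches a single saved search, without it the entire SplunkEnterpriseSecuritySuite saved/searches collection. A minimal sketch:

- name: Fetch one correlation search (omit name to list them all)
  splunk.es.correlation_search_info:
    name: Test Fake Coorelation Search From Playbook
  register: correlation_search_info_output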
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py
index ac834d1b9..bcecf9926 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py
@@ -6,6 +6,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -29,12 +30,12 @@ options:
description:
- Name of correlation search
type: str
- required: True
+ required: true
disabled:
description:
- Disable correlation search
type: bool
- default: False
+ default: false
description:
description:
- Description of the correlation search; this will populate the description field for the web console
@@ -192,7 +193,7 @@ options:
description:
- To suppress alerts from this correlation search or not
type: bool
- default: False
+ default: false
running_config:
description:
- The module, by default, will connect to the remote device and retrieve the current
@@ -319,7 +320,7 @@ EXAMPLES = """
throttle_window_duration: 5s
throttle_fields_to_group_by:
- test_field1
- suppress_alerts: False
+ suppress_alerts: false
search: >
'| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
@@ -426,7 +427,7 @@ EXAMPLES = """
throttle_fields_to_group_by:
- test_field1
- test_field2
- suppress_alerts: True
+ suppress_alerts: true
search: >
'| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
@@ -606,7 +607,6 @@ EXAMPLES = """
# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite"
# },
# ],
-
"""
RETURN = """
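The resource-module variant manages a list of search definitions under config plus a state such as merged or replaced. A minimal sketch; the search name and description are illustrative assumptions, and only keys that appear in this module's documentation and EXAMPLES are used:

- name: Merge a correlation search definition
  splunk.es.splunk_correlation_searches:
    config:
      - name: Ansible Test  # illustrative search name
        disabled: false
        description: test description
        suppress_alerts: false
        search: source="/var/log/snort.log"
    state: merged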
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py
index 080d23d3b..b0108d74a 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -25,12 +26,12 @@ options:
name:
description:
- The file or directory path to monitor on the system.
- required: True
+ required: true
type: str
state:
description:
- Add or remove a data source.
- required: True
+ required: true
choices:
- "present"
- "absent"
@@ -38,41 +39,41 @@ options:
blacklist:
description:
- Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.
- required: False
+ required: false
type: str
check_index:
description:
- - If set to C(True), the index value is checked to ensure that it is the name of a valid index.
- required: False
+ - If set to C(true), the index value is checked to ensure that it is the name of a valid index.
+ required: false
type: bool
- default: False
+ default: false
check_path:
description:
- - If set to C(True), the name value is checked to ensure that it exists.
- required: False
+ - If set to C(true), the name value is checked to ensure that it exists.
+ required: false
type: bool
crc_salt:
description:
- A string that modifies the file tracking identity for files in this input.
The magic value <SOURCE> invokes special behavior (see admin documentation).
- required: False
+ required: false
type: str
disabled:
description:
- Indicates if input monitoring is disabled.
- required: False
- default: False
+ required: false
+ default: false
type: bool
followTail:
description:
- - If set to C(True), files that are seen for the first time is read from the end.
- required: False
+ - If set to C(true), files that are seen for the first time are read from the end.
+ required: false
type: bool
- default: False
+ default: false
host:
description:
- The value to populate in the host field for events from this data input.
- required: False
+ required: false
type: str
host_regex:
description:
@@ -80,40 +81,40 @@ options:
matches this regular expression, the captured value is used to populate
the host field for events from this data input. The regular expression
must have one capture group.
- required: False
+ required: false
type: str
host_segment:
description:
- Use the specified slash-separated segment of the filepath as the host field value.
- required: False
+ required: false
type: int
ignore_older_than:
description:
- Specify a time value. If the modification time of a file being monitored
falls outside of this rolling time window, the file is no longer being monitored.
- required: False
+ required: false
type: str
index:
description:
- Which index events from this input should be stored in. Defaults to default.
- required: False
+ required: false
type: str
recursive:
description:
- - Setting this to False prevents monitoring of any subdirectories encountered within this data input.
- required: False
+ - Setting this to false prevents monitoring of any subdirectories encountered within this data input.
+ required: false
type: bool
- default: False
+ default: false
rename_source:
description:
- The value to populate in the source field for events from this data input.
The same source should not be used for multiple data inputs.
- required: False
+ required: false
type: str
sourcetype:
description:
- The value to populate in the sourcetype field for incoming events.
- required: False
+ required: false
type: str
time_before_close:
description:
@@ -121,12 +122,12 @@ options:
file is kept open for a minimum of the number of seconds specified in
this value. After this period has elapsed, the file is checked again for
more data.
- required: False
+ required: false
type: int
whitelist:
description:
- Specify a regular expression for a file path. Only file paths that match this regular expression are indexed.
- required: False
+ required: false
type: str
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
@@ -136,22 +137,18 @@ EXAMPLES = """
splunk.es.data_input_monitor:
name: "/var/log/example.log"
state: "present"
- recursive: True
+ recursive: true
"""
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
+from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import quote_plus
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(
name=dict(required=True, type="str"),
state=dict(choices=["present", "absent"], required=True),
@@ -197,8 +194,8 @@ def main():
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
query_dict = utils.remove_empties(query_dict)
@@ -207,13 +204,15 @@ def main():
needs_change = False
for arg in request_data:
if arg in query_dict["entry"][0]["content"]:
- if to_text(
- query_dict["entry"][0]["content"][arg]
- ) != to_text(request_data[arg]):
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_data[arg],
+ ):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -224,11 +223,13 @@ def main():
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
module.exit_json(
- changed=True, msg="{0} updated.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} updated.",
+ splunk_data=splunk_data,
)
else:
# Create it
@@ -238,16 +239,14 @@ def main():
"servicesNS/nobody/search/data/inputs/monitor",
data=_data,
)
- module.exit_json(
- changed=True, msg="{0} created.", splunk_data=splunk_data
- )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
if module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
module.exit_json(
changed=True,
@@ -255,9 +254,7 @@ def main():
splunk_data=splunk_data,
)
- module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
- )
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
if __name__ == "__main__":
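Because the module exits before create_update when check mode detects a pending change (the module.check_mode branches above), a dry run is cheap. A sketch with illustrative values:

- name: Preview a monitor input change without applying it
  splunk.es.data_input_monitor:
    name: /var/log/messages
    state: present
    recursive: true
  check_mode: true  # report the pending change without calling create_update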
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py
index 5771eb9cc..14905563a 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -25,7 +26,7 @@ options:
protocol:
description:
- Choose between tcp and udp
- required: True
+ required: true
choices:
- 'tcp'
- 'udp'
@@ -37,7 +38,7 @@ options:
- C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data.
- C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
default: "ip"
- required: False
+ required: false
type: str
choices:
- "ip"
@@ -51,7 +52,7 @@ options:
- "absent"
- "enabled"
- "disable"
- required: False
+ required: false
default: "present"
type: str
datatype:
@@ -62,12 +63,12 @@ options:
- "cooked"
- "raw"
default: "raw"
- required: False
+ required: false
type: str
host:
description:
- Host from which the indexer gets data.
- required: False
+ required: false
type: str
index:
description:
@@ -76,7 +77,7 @@ options:
name:
description:
- The input port which receives raw data.
- required: True
+ required: true
type: str
queue:
description:
@@ -89,7 +90,7 @@ options:
- "parsingQueue"
- "indexQueue"
type: str
- required: False
+ required: false
default: "parsingQueue"
rawTcpDoneTimeout:
description:
@@ -98,16 +99,16 @@ options:
number of seconds, it adds a Done-key. This implies the last event is completely received.
default: 10
type: int
- required: False
+ required: false
restrictToHost:
description:
- Allows for restricting this input to only accept data from the host specified here.
- required: False
+ required: false
type: str
ssl:
description:
- Enable or disable ssl for the data stream
- required: False
+ required: false
type: bool
source:
description:
@@ -126,7 +127,7 @@ options:
description:
- Set the source type for events from this input.
- '"sourcetype=" is automatically prepended to <string>.'
- - Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).
+ - Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).
type: str
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
@@ -140,16 +141,14 @@ EXAMPLES = """
"""
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
+from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import quote_plus
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(
state=dict(
required=False,
@@ -178,9 +177,7 @@ def main():
ssl=dict(required=False, type="bool", default=None),
source=dict(required=False, type="str", default=None),
sourcetype=dict(required=False, type="str", default=None),
- datatype=dict(
- required=False, choices=["cooked", "raw"], default="raw"
- ),
+ datatype=dict(required=False, choices=["cooked", "raw"], default="raw"),
)
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
@@ -198,7 +195,7 @@ def main():
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
quote_plus(module.params["name"]),
- )
+ ),
)
if module.params["state"] in ["present", "enabled", "disabled"]:
@@ -211,13 +208,15 @@ def main():
needs_change = False
for arg in request_data:
if arg in query_dict["entry"][0]["content"]:
- if to_text(
- query_dict["entry"][0]["content"][arg]
- ) != to_text(request_data[arg]):
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_data[arg],
+ ):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -236,11 +235,15 @@ def main():
)
if module.params["state"] in ["present", "enabled"]:
module.exit_json(
- changed=True, msg="{0} updated.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} updated.",
+ splunk_data=splunk_data,
)
else:
module.exit_json(
- changed=True, msg="{0} disabled.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} disabled.",
+ splunk_data=splunk_data,
)
else:
# Create it
@@ -251,9 +254,7 @@ def main():
),
data=_data,
)
- module.exit_json(
- changed=True, msg="{0} created.", splunk_data=splunk_data
- )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
elif module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
@@ -261,7 +262,7 @@ def main():
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
quote_plus(module.params["name"]),
- )
+ ),
)
module.exit_json(
changed=True,
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py
index 0f4922f77..1f664afb2 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py
@@ -6,6 +6,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -26,7 +27,7 @@ options:
name:
description:
- The file or directory path to monitor on the system.
- required: True
+ required: true
type: str
blacklist:
description:
@@ -34,13 +35,13 @@ options:
type: str
check_index:
description:
- - If set to C(True), the index value is checked to ensure that it is the name of a valid index.
+ - If set to C(true), the index value is checked to ensure that it is the name of a valid index.
- This parameter is not returned by Splunk while obtaining object information.
It is therefore left out while performing idempotency checks
type: bool
check_path:
description:
- - If set to C(True), the name value is checked to ensure that it exists.
+ - If set to C(true), the name value is checked to ensure that it exists.
- This parameter is not returned by Splunk while obtaining object information.
It is therefore left out while performing idempotency checks
type: bool
@@ -53,10 +54,10 @@ options:
description:
- Indicates if input monitoring is disabled.
type: bool
- default: False
+ default: false
follow_tail:
description:
- - If set to C(True), files that are seen for the first time is read from the end.
+ - If set to C(true), files that are seen for the first time are read from the end.
type: bool
host:
description:
@@ -179,8 +180,8 @@ EXAMPLES = """
config:
- name: "/var/log"
blacklist: "//var/log/[a-z]/gm"
- check_index: True
- check_path: True
+ check_index: true
+ check_path: true
crc_salt: <SOURCE>
rename_source: "test"
whitelist: "//var/log/[0-9]/gm"
@@ -283,7 +284,6 @@ EXAMPLES = """
# "name": "/var/log"
# }
# ],
-
"""
RETURN = """
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py
index 688e806f1..cf259c2d6 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py
@@ -6,6 +6,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -25,12 +26,12 @@ options:
name:
description:
- The input port which receives raw data.
- required: True
+ required: true
type: str
protocol:
description:
- Choose whether to manage TCP or UDP inputs
- required: True
+ required: true
choices:
- 'tcp'
- 'udp'
@@ -58,7 +59,7 @@ options:
- "raw"
- "splunktcptoken"
- "ssl"
- required: False
+ required: false
type: str
disabled:
description:
@@ -124,7 +125,7 @@ options:
description:
- Set the source type for events from this input.
- '"sourcetype=" is automatically prepended to <string>.'
- - Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).
+ - Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).
type: str
token:
description:
@@ -358,7 +359,7 @@ EXAMPLES = """
datatype: raw
name: 8100
connection_host: ip
- disabled: True
+ disabled: true
raw_tcp_done_timeout: 9
restrict_to_host: default
queue: parsingQueue
@@ -409,7 +410,7 @@ EXAMPLES = """
datatype: cooked
name: 8101
connection_host: ip
- disabled: False
+ disabled: false
restrict_to_host: default
state: merged
@@ -460,7 +461,7 @@ EXAMPLES = """
# ],
# "before": [],
-- name: To add the Splunk SSL
+- name: To add the Splunk SSL
splunk.es.splunk_data_inputs_network:
config:
- protocol: tcp
@@ -531,7 +532,7 @@ EXAMPLES = """
datatype: raw
name: 8100
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
index: default
queue: parsingQueue
@@ -575,7 +576,6 @@ EXAMPLES = """
# "sourcetype": "test_source_type"
# }
# ],
-
"""
RETURN = """
diff --git a/ansible_collections/splunk/es/pyproject.toml b/ansible_collections/splunk/es/pyproject.toml
index 96ec36d26..fa4225f3e 100644
--- a/ansible_collections/splunk/es/pyproject.toml
+++ b/ansible_collections/splunk/es/pyproject.toml
@@ -1,11 +1,7 @@
[tool.black]
-line-length = 79
+line-length = 100
[tool.pytest.ini_options]
addopts = ["-vvv", "-n", "2", "--log-level", "WARNING", "--color", "yes"]
-testpaths = [
- "tests",
-]
-filterwarnings = [
- 'ignore:AnsibleCollectionFinder has already been configured',
-]
\ No newline at end of file
+testpaths = ["tests"]
+filterwarnings = ['ignore:AnsibleCollectionFinder has already been configured']
diff --git a/ansible_collections/splunk/es/test-requirements.txt b/ansible_collections/splunk/es/test-requirements.txt
index 8002336b1..94ff7c9e3 100644
--- a/ansible_collections/splunk/es/test-requirements.txt
+++ b/ansible_collections/splunk/es/test-requirements.txt
@@ -1,8 +1,9 @@
-black==22.3.0 ; python_version > '3.5'
+# For ansible-tox-linters
+black==23.3.0 ; python_version >= '3.7'
flake8
-mock ; python_version < '3.5'
-pexpect
-pytest-xdist
yamllint
-coverage==4.5.4
-git+https://github.com/ansible-community/pytest-ansible-units.git
+
+# Unit test runner
+pytest-ansible ; python_version >= '3.9'
+git+https://github.com/ansible-community/pytest-ansible-units.git ; python_version < '3.9'
+pytest-xdist
diff --git a/ansible_collections/splunk/es/tests/config.yml b/ansible_collections/splunk/es/tests/config.yml
new file mode 100644
index 000000000..41f529264
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/config.yml
@@ -0,0 +1,3 @@
+---
+modules:
+ python_requires: ">=3.6"
diff --git a/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/tasks/main.yml b/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/tasks/main.yml
index d111fea78..12a6a008d 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/tasks/main.yml
+++ b/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/tasks/main.yml
@@ -1,24 +1,24 @@
---
-- name: remove previous correlation_search
+- name: Remove previous correlation_search
correlation_search:
- name: "Test Fake Coorelation Search From Playbook"
- description: "Test Fake Coorelation Search From Playbook, description edition."
- search: 'source="/var/log/snort.log"'
- state: "absent"
+ name: Test Fake Coorelation Search From Playbook
+ description: Test Fake Coorelation Search From Playbook, description edition.
+ search: source="/var/log/snort.log"
+ state: absent
-- name: create correlation_search
+- name: Create correlation_search
correlation_search:
- name: "Test Fake Coorelation Search From Playbook"
- description: "Test Fake Coorelation Search From Playbook, description edition."
- search: 'source="/var/log/snort.log"'
- state: "present"
+ name: Test Fake Coorelation Search From Playbook
+ description: Test Fake Coorelation Search From Playbook, description edition.
+ search: source="/var/log/snort.log"
+ state: present
- name: Test splunk.es.adaptive_response_notable_event
adaptive_response_notable_event:
- name: "Fake notable event from playbook"
- correlation_search_name: "Test Fake Coorelation Search From Playbook"
- description: "Test Fake notable event from playbook, description edition."
- state: "present"
+ name: Fake notable event from playbook
+ correlation_search_name: Test Fake Coorelation Search From Playbook
+ description: Test Fake notable event from playbook, description edition.
+ state: present
next_steps:
- ping
- nslookup
@@ -29,17 +29,17 @@
register: adaptive_response_notable_event_out
- name: Assert Create splunk.es.adaptive_response_notable_event CHANGED
- assert:
+ ansible.builtin.assert:
that:
- adaptive_response_notable_event_out['changed'] == True
- adaptive_response_notable_event_out['failed'] == False
- name: Validate splunk.es.adaptive_response_notable_event idempotent
adaptive_response_notable_event:
- name: "Fake notable event from playbook"
- correlation_search_name: "Test Fake Coorelation Search From Playbook"
- description: "Test Fake notable event from playbook, description edition."
- state: "present"
+ name: Fake notable event from playbook
+ correlation_search_name: Test Fake Coorelation Search From Playbook
+ description: Test Fake notable event from playbook, description edition.
+ state: present
next_steps:
- ping
- nslookup
@@ -49,7 +49,7 @@
register: adaptive_response_notable_event_out2
- name: Assert Create splunk.es.adaptive_response_notable_event IDEMPOTENT
- assert:
+ ansible.builtin.assert:
that:
- adaptive_response_notable_event_out2['changed'] == False
- adaptive_response_notable_event_out2['failed'] == False
diff --git a/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/tasks/main.yml b/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/tasks/main.yml
index a2ae59ef4..fb49f1d27 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/tasks/main.yml
+++ b/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/tasks/main.yml
@@ -1,74 +1,74 @@
---
- name: Cleanup old correlation_search
correlation_search:
- name: "Test Fake Coorelation Search From Playbook"
- description: "Test Fake Coorelation Search From Playbook, description edition."
- search: 'source="/var/log/snort.log"'
- state: "absent"
+ name: Test Fake Coorelation Search From Playbook
+ description: Test Fake Coorelation Search From Playbook, description edition.
+ search: source="/var/log/snort.log"
+ state: absent
- name: Test correlation_search - CREATE
correlation_search:
- name: "Test Fake Coorelation Search From Playbook"
- description: "Test Fake Coorelation Search From Playbook, description edition."
- search: 'source="/var/log/snort.log"'
- state: "present"
+ name: Test Fake Coorelation Search From Playbook
+ description: Test Fake Coorelation Search From Playbook, description edition.
+ search: source="/var/log/snort.log"
+ state: present
register: correlation_search_create_output
- name: Assert Create splunk.es.correlation_search CHANGED
- assert:
+ ansible.builtin.assert:
that:
- correlation_search_create_output['changed'] == True
- correlation_search_create_output['failed'] == False
- name: Test correlation_search - CREATE IDEMPOTENT
correlation_search:
- name: "Test Fake Coorelation Search From Playbook"
- description: "Test Fake Coorelation Search From Playbook, description edition."
- search: 'source="/var/log/snort.log"'
- state: "present"
+ name: Test Fake Coorelation Search From Playbook
+ description: Test Fake Coorelation Search From Playbook, description edition.
+ search: source="/var/log/snort.log"
+ state: present
register: correlation_search_create_output2
- name: Assert Create splunk.es.correlation_search IDEMPOTENT
- assert:
+ ansible.builtin.assert:
that:
- correlation_search_create_output2['changed'] == False
- correlation_search_create_output2['failed'] == False
- name: Test correlation_search_info
correlation_search_info:
- name: "Test Fake Coorelation Search From Playbook"
+ name: Test Fake Coorelation Search From Playbook
register: correlation_search_info_output
- name: Assert Create splunk.es.correlation_search CHANGED
- assert:
+ ansible.builtin.assert:
that:
- correlation_search_info_output['changed'] == False
- correlation_search_info_output['failed'] == False
- name: Test correlation_search - DELETE
correlation_search:
- name: "Test Fake Coorelation Search From Playbook"
- description: "Test Fake Coorelation Search From Playbook, description edition."
- search: 'source="/var/log/snort.log"'
- state: "absent"
+ name: Test Fake Coorelation Search From Playbook
+ description: Test Fake Coorelation Search From Playbook, description edition.
+ search: source="/var/log/snort.log"
+ state: absent
register: correlation_search_delete_output
- name: Assert Create splunk.es.correlation_search CHANGED
- assert:
+ ansible.builtin.assert:
that:
- correlation_search_delete_output['changed'] == True
- correlation_search_delete_output['failed'] == False
- name: Test correlation_search - DELETE IDEMPOTENT
correlation_search:
- name: "Test Fake Coorelation Search From Playbook"
- description: "Test Fake Coorelation Search From Playbook, description edition."
- search: 'source="/var/log/snort.log"'
- state: "absent"
+ name: Test Fake Coorelation Search From Playbook
+ description: Test Fake Coorelation Search From Playbook, description edition.
+ search: source="/var/log/snort.log"
+ state: absent
register: correlation_search_delete_output2
- name: Assert Create splunk.es.correlation_search IDEMPOTENT
- assert:
+ ansible.builtin.assert:
that:
- correlation_search_delete_output2['changed'] == False
- correlation_search_delete_output2['failed'] == False
diff --git a/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/tasks/main.yml b/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/tasks/main.yml
index 87459760e..c82828756 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/tasks/main.yml
+++ b/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/tasks/main.yml
@@ -1,58 +1,58 @@
---
- name: Clean up previous data_input_monitor
data_input_monitor:
- name: "/var/log/messages"
- state: "absent"
- recursive: True
+ name: /var/log/messages
+ state: absent
+ recursive: true
- name: Test data_input_monitor - CREATE
data_input_monitor:
- name: "/var/log/messages"
- state: "present"
- recursive: True
+ name: /var/log/messages
+ state: present
+ recursive: true
register: data_input_monitor_output
- name: Assert Create splunk.es.data_input_monitor CHANGED
- assert:
+ ansible.builtin.assert:
that:
- data_input_monitor_output['changed'] == True
- data_input_monitor_output['failed'] == False
- name: Test data_input_monitor - CREATE IDEMPOTENT
data_input_monitor:
- name: "/var/log/messages"
- state: "present"
- recursive: True
+ name: /var/log/messages
+ state: present
+ recursive: true
register: data_input_monitor_output2
- name: Assert Create splunk.es.data_input_monitor CREATE IDEMPOTENT
- assert:
+ ansible.builtin.assert:
that:
- data_input_monitor_output2['changed'] == False
- data_input_monitor_output2['failed'] == False
- name: Test data_input_monitor - DELETE
data_input_monitor:
- name: "/var/log/messages"
- state: "absent"
- recursive: True
+ name: /var/log/messages
+ state: absent
+ recursive: true
register: data_input_monitor_absent_output
- name: Assert Create splunk.es.data_input_monitor CHANGED
- assert:
+ ansible.builtin.assert:
that:
- data_input_monitor_absent_output['changed'] == True
- data_input_monitor_absent_output['failed'] == False
- name: Test data_input_monitor - DELETE IDEMPOTENT
data_input_monitor:
- name: "/var/log/messages"
- state: "absent"
- recursive: True
+ name: /var/log/messages
+ state: absent
+ recursive: true
register: data_input_monitor_absent_output2
- name: Assert Create splunk.es.data_input_monitor DELETE IDEMPOTENT
- assert:
+ ansible.builtin.assert:
that:
- data_input_monitor_absent_output2['changed'] == False
- data_input_monitor_absent_output2['failed'] == False
diff --git a/ansible_collections/splunk/es/tests/integration/targets/data_input_network/tasks/main.yml b/ansible_collections/splunk/es/tests/integration/targets/data_input_network/tasks/main.yml
index 5082458c0..31f1f611d 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/data_input_network/tasks/main.yml
+++ b/ansible_collections/splunk/es/tests/integration/targets/data_input_network/tasks/main.yml
@@ -2,18 +2,18 @@
- name: Cleanup previous data_input_network
data_input_network:
name: "8099"
- protocol: "tcp"
- state: "absent"
+ protocol: tcp
+ state: absent
- name: Test data_input_network - CREATE
data_input_network:
name: "8099"
- protocol: "tcp"
- state: "present"
+ protocol: tcp
+ state: present
register: data_input_network_output
- name: Assert Create splunk.es.data_input_network CHANGED
- assert:
+ ansible.builtin.assert:
that:
- data_input_network_output is changed
- data_input_network_output is not failed
@@ -21,12 +21,12 @@
- name: Test data_input_network - CREATE IDEMPOTENT
data_input_network:
name: "8099"
- protocol: "tcp"
- state: "present"
+ protocol: tcp
+ state: present
register: data_input_network_output2
- name: Assert Create splunk.es.data_input_network CREATE IDEMPOTENT
- assert:
+ ansible.builtin.assert:
that:
- data_input_network_output2 is not changed
- data_input_network_output2 is not failed
@@ -34,12 +34,12 @@
- name: Test data_input_network - DELETE
data_input_network:
name: "8099"
- protocol: "tcp"
- state: "absent"
+ protocol: tcp
+ state: absent
register: data_input_network_absent_output
- name: Assert Create splunk.es.data_input_network CHANGED
- assert:
+ ansible.builtin.assert:
that:
- data_input_network_absent_output is changed
- data_input_network_absent_output is not failed
@@ -47,12 +47,12 @@
- name: Test data_input_network - DELETE IDEMPOTENT
data_input_network:
name: "8099"
- protocol: "tcp"
- state: "absent"
+ protocol: tcp
+ state: absent
register: data_input_network_absent_output2
- name: Assert Create splunk.es.data_input_network DELETE IDEMPOTENT
- assert:
+ ansible.builtin.assert:
that:
- data_input_network_absent_output2 is not changed
- data_input_network_absent_output2 is not failed
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml
index 10c0fabcb..5f709c5aa 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml
@@ -1,2 +1,2 @@
---
-testcase: '*'
+testcase: "*"
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml
index dcc81f25f..c8ca9a326 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml
@@ -1,18 +1,19 @@
---
-- name: collect all test cases
- find:
- paths: '{{ role_path }}/tests'
- patterns: '{{ testcase }}.yaml'
+- name: Collect all test cases
+ ansible.builtin.find:
+ paths: "{{ role_path }}/tests"
+ patterns: "{{ testcase }}.yaml"
register: test_cases
-- name: set test_items
- set_fact: test_items="{{ test_cases.files | map(attribute='path') | list }}"
+- name: Set test_items
+ ansible.builtin.set_fact:
+ test_items: "{{ test_cases.files | map(attribute='path') | list }}"
- name: Run test case (connection=ansible.netcommon.httpapi)
- include: '{{ test_case_to_run }}'
+ ansible.builtin.include_tasks: "{{ test_case_to_run }}"
vars:
ansible_connection: ansible.netcommon.httpapi
- with_items: '{{ test_items }}'
+ with_items: "{{ test_items }}"
loop_control:
loop_var: test_case_to_run
tags: connection_httpapi
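
Beyond FQCN hygiene, swapping the deprecated include action for ansible.builtin.include_tasks changes tag semantics: a dynamic include does not pass its tags down to the tasks it loads, so a run filtered with --tags connection_httpapi would skip the inner tasks. A sketch of one way to keep them selectable, using the apply keyword with the same loop variables as above:

    - name: Run test case (connection=ansible.netcommon.httpapi)
      ansible.builtin.include_tasks:
        file: "{{ test_case_to_run }}"
        apply:
          tags:
            - connection_httpapi
      with_items: "{{ test_items }}"
      loop_control:
        loop_var: test_case_to_run
      tags: connection_httpapi
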
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml
index 62cc1ae1e..098fa1e5a 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml
@@ -1,7 +1,7 @@
---
-- include: cli.yaml
+- ansible.builtin.include: cli.yaml
tags:
- cli
-- include: redirection.yaml
+- ansible.builtin.include: redirection.yaml
when: ansible_version.full is version('2.10.0', '>=')
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml
index bafc23a45..6bab72a07 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml
@@ -1,6 +1,6 @@
---
-- name: collect all test cases
- find:
- paths: '{{ role_path }}/tests/redirection'
- patterns: '{{ testcase }}.yaml'
+- name: Collect all test cases
+ ansible.builtin.find:
+ paths: "{{ role_path }}/tests/redirection"
+ patterns: "{{ testcase }}.yaml"
register: test_cases
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml
index 02e9074da..adaa80481 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml
@@ -10,40 +10,40 @@
- name: populate notable event adaptive response for test correlation search
splunk.es.splunk_adaptive_response_notable_events:
config:
- - correlation_search_name: Ansible Test
- description: test notable event
- drilldown_earliest_offset: $info_min_time$
- drilldown_latest_offset: $info_max_time$
- drilldown_name: test_drill_name
- drilldown_search: test_drill
- extract_artifacts:
- asset:
- - src
- - dest
- - dvc
- - orig_host
- identity:
- - src_user
- - user
- - src_user_id
- - src_user_role
- - user_id
- - user_role
- - vendor_account
- investigation_profiles:
- - test profile 1
- - test profile 2
- - test profile 3
- next_steps:
- - makestreams
- - nbtstat
- - nslookup
- name: ansible_test_notable
- recommended_actions:
- - email
- - logevent
- - makestreams
- - nbtstat
- security_domain: threat
- severity: high
-      state: merged
\ No newline at end of file
+ - correlation_search_name: Ansible Test
+ description: test notable event
+ drilldown_earliest_offset: $info_min_time$
+ drilldown_latest_offset: $info_max_time$
+ drilldown_name: test_drill_name
+ drilldown_search: test_drill
+ extract_artifacts:
+ asset:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ identity:
+ - src_user
+ - user
+ - src_user_id
+ - src_user_role
+ - user_id
+ - user_role
+ - vendor_account
+ investigation_profiles:
+ - test profile 1
+ - test profile 2
+ - test profile 3
+ next_steps:
+ - makestreams
+ - nbtstat
+ - nslookup
+ name: ansible_test_notable
+ recommended_actions:
+ - email
+ - logevent
+ - makestreams
+ - nbtstat
+ security_domain: threat
+ severity: high
+ state: merged
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/vars/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/vars/main.yaml
index 8116add0d..daaaec773 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/vars/main.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/vars/main.yaml
@@ -1,7 +1,6 @@
---
merged:
before: []
-
after:
- correlation_search_name: Ansible Test
description: test notable event
@@ -10,12 +9,12 @@ merged:
drilldown_name: test_drill_name
drilldown_search: test_drill
extract_artifacts:
- asset:
+ asset:
- src
- dest
- dvc
- orig_host
- identity:
+ identity:
- src_user
- user
- src_user_id
@@ -32,7 +31,7 @@ merged:
- nbtstat
- nslookup
name: ansible_test_notable
- recommended_actions:
+ recommended_actions:
- email
- logevent
- makestreams
@@ -49,12 +48,12 @@ replaced:
drilldown_name: test_drill_name
drilldown_search: test_drill
extract_artifacts:
- asset:
+ asset:
- src
- dest
- dvc
- orig_host
- identity:
+ identity:
- src_user
- user
- src_user_id
@@ -71,7 +70,7 @@ replaced:
- nbtstat
- nslookup
name: ansible_test_notable
- recommended_actions:
+ recommended_actions:
- email
- logevent
- makestreams
@@ -84,18 +83,18 @@ replaced:
drilldown_earliest_offset: $info_min_time$
drilldown_latest_offset: $info_max_time$
extract_artifacts:
- asset:
+ asset:
- src
- dest
- identity:
+ identity:
- src_user
- user
- src_user_id
next_steps:
- makestreams
name: ansible_test_notable
- recommended_actions:
+ recommended_actions:
- email
- logevent
security_domain: threat
-    severity: high
\ No newline at end of file
+ severity: high
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml
index 39b507ff3..8a4f2226d 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml
@@ -33,6 +33,6 @@
throttle_window_duration: 5s
throttle_fields_to_group_by:
- test_field1
- suppress_alerts: False
+ suppress_alerts: false
search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6'
-    state: merged
\ No newline at end of file
+ state: merged
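
The False/True to false/true rewrites repeated through these fixtures match yamllint's truthy rule, which rejects capitalized booleans because YAML 1.1 and 1.2 parsers read them differently. Assuming yamllint is what drives the change, the relevant stanza of a .yamllint config is a short sketch away:

    rules:
      truthy:
        allowed-values: ["true", "false"]
        check-keys: false
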
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml
index 7707f9191..ee1b9020c 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml
@@ -3,4 +3,4 @@
splunk.es.splunk_correlation_searches:
config:
- name: Ansible Test
-    state: deleted
\ No newline at end of file
+ state: deleted
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/merged.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/merged.yaml
index a83d1aacf..d575990e2 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/merged.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/merged.yaml
@@ -1,6 +1,7 @@
---
- debug:
- msg: START Merged correlation_searches state for integration tests on connection={{
+ msg:
+ START Merged correlation_searches state for integration tests on connection={{
ansible_connection }}
- include_tasks: _remove_config.yaml
@@ -43,7 +44,7 @@
throttle_window_duration: 5s
throttle_fields_to_group_by:
- test_field1
- suppress_alerts: False
+ suppress_alerts: false
search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6'
- name: Assert that task reports change and after dict is correctly generated
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml
index a41649a5b..9ac80cc15 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml
@@ -6,7 +6,6 @@
- include_tasks: _populate_config.yaml
- block:
-
- name: Replace existing correlation searches configuration
register: result
splunk.es.splunk_correlation_searches: &id001
@@ -49,7 +48,7 @@
throttle_fields_to_group_by:
- test_field1
- test_field2
- suppress_alerts: True
+ suppress_alerts: true
search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6'
- assert:
@@ -58,8 +57,9 @@
- replaced['before'] == result['correlation_searches']['before']
- replaced['after'] == result['correlation_searches']['after']
- - name: Replaces device configuration of listed data inputs networks configuration with
- provided configuration (IDEMPOTENT)
+ - name:
+ Replaces device configuration of listed data inputs networks configuration with
+ provided configuration (IDEMPOTENT)
register: result
splunk.es.splunk_correlation_searches: *id001
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml
index 151e7305a..f0124db38 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml
@@ -42,7 +42,7 @@
throttle_window_duration: 5s
throttle_fields_to_group_by:
- test_field1
- suppress_alerts: False
+ suppress_alerts: false
search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6'
- name: Gather correlation searches configuration facts
@@ -93,7 +93,7 @@
throttle_fields_to_group_by:
- test_field1
- test_field2
- suppress_alerts: True
+ suppress_alerts: true
search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6'
state: replaced
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml
index 10c0fabcb..5f709c5aa 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml
@@ -1,2 +1,2 @@
---
-testcase: '*'
+testcase: "*"
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml
index dcc81f25f..c8ca9a326 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml
@@ -1,18 +1,19 @@
---
-- name: collect all test cases
- find:
- paths: '{{ role_path }}/tests'
- patterns: '{{ testcase }}.yaml'
+- name: Collect all test cases
+ ansible.builtin.find:
+ paths: "{{ role_path }}/tests"
+ patterns: "{{ testcase }}.yaml"
register: test_cases
-- name: set test_items
- set_fact: test_items="{{ test_cases.files | map(attribute='path') | list }}"
+- name: Set test_items
+ ansible.builtin.set_fact:
+ test_items: "{{ test_cases.files | map(attribute='path') | list }}"
- name: Run test case (connection=ansible.netcommon.httpapi)
- include: '{{ test_case_to_run }}'
+ ansible.builtin.include_tasks: "{{ test_case_to_run }}"
vars:
ansible_connection: ansible.netcommon.httpapi
- with_items: '{{ test_items }}'
+ with_items: "{{ test_items }}"
loop_control:
loop_var: test_case_to_run
tags: connection_httpapi
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml
index 62cc1ae1e..098fa1e5a 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml
@@ -1,7 +1,7 @@
---
-- include: cli.yaml
+- ansible.builtin.include: cli.yaml
tags:
- cli
-- include: redirection.yaml
+- ansible.builtin.include: redirection.yaml
when: ansible_version.full is version('2.10.0', '>=')
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml
index bafc23a45..6bab72a07 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml
@@ -1,6 +1,6 @@
---
-- name: collect all test cases
- find:
- paths: '{{ role_path }}/tests/redirection'
- patterns: '{{ testcase }}.yaml'
+- name: Collect all test cases
+ ansible.builtin.find:
+ paths: "{{ role_path }}/tests/redirection"
+ patterns: "{{ testcase }}.yaml"
register: test_cases
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml
index 2bb0129a4..c5935e460 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml
@@ -4,19 +4,19 @@
config:
- name: "/var/log"
blacklist: '/\/var\/log\/[a-z]/gm'
- check_index: True
- check_path: True
+ check_index: true
+ check_path: true
crc_salt: <SOURCE>
- disabled: False
- follow_tail: False
+ disabled: false
+ follow_tail: false
host: "$decideOnStartup"
host_regex: "/(test_host)/gm"
host_segment: 3
ignore_older_than: 5d
index: default
- recursive: True
+ recursive: true
rename_source: test
sourcetype: test_source_type
time_before_close: 4
whitelist: '/\/var\/log\/[a-z]/gm'
-    state: merged
\ No newline at end of file
+ state: merged
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_remove_dim_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_remove_dim_config.yaml
index d0fdb2d90..ca3308f11 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_remove_dim_config.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_remove_dim_config.yaml
@@ -3,4 +3,4 @@
splunk.es.splunk_data_inputs_monitor:
config:
- name: "/var/log"
-    state: deleted
\ No newline at end of file
+ state: deleted
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml
index 8f19b500f..4cbac1c04 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml
@@ -1,6 +1,7 @@
---
- debug:
- msg: Start Deleted integration state for data_inputs_monitors ansible_connection={{ ansible_connection
+ msg:
+ Start Deleted integration state for data_inputs_monitors ansible_connection={{ ansible_connection
}}
- include_tasks: _remove_dim_config.yaml
@@ -14,7 +15,7 @@
- name: /var/log
state: deleted
register: result
-
+
- assert:
that:
- result.changed == true
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml
index 0388c26c1..5ad0c8bad 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml
@@ -1,6 +1,7 @@
---
- debug:
- msg: START Merged data_inputs_monitor state for integration tests on connection={{
+ msg:
+ START Merged data_inputs_monitor state for integration tests on connection={{
ansible_connection }}
- include_tasks: _remove_dim_config.yaml
@@ -14,17 +15,17 @@
config:
- name: "/var/log"
blacklist: '/\/var\/log\/[a-z]/gm'
- check_index: True
- check_path: True
+ check_index: true
+ check_path: true
crc_salt: <SOURCE>
- disabled: False
- follow_tail: False
+ disabled: false
+ follow_tail: false
host: "$decideOnStartup"
host_regex: "/(test_host)/gm"
host_segment: 3
ignore_older_than: 5d
index: default
- recursive: True
+ recursive: true
rename_source: test
sourcetype: test_source_type
time_before_close: 4
@@ -53,5 +54,4 @@
- result['changed'] == false
always:
-
-    - include_tasks: _remove_dim_config.yaml
\ No newline at end of file
+ - include_tasks: _remove_dim_config.yaml
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml
index 7a9dd8c46..e2435f2ce 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml
@@ -1,13 +1,13 @@
---
- debug:
- msg: START Replaced data_inputs_monitor state for integration tests on connection={{ ansible_connection
+ msg:
+ START Replaced data_inputs_monitor state for integration tests on connection={{ ansible_connection
}}
- include_tasks: _remove_dim_config.yaml
- include_tasks: _populate_dim_config.yaml
- block:
-
- name: Replace existing data inputs monitors configuration
register: result
splunk.es.splunk_data_inputs_monitor: &id001
@@ -17,7 +17,6 @@
blacklist: '/\/var\/log\/[a-z0-9]/gm'
crc_salt: <SOURCE>
-
- assert:
that:
- result.changed == true
@@ -28,8 +27,9 @@
symmetric_difference(result['data_inputs_monitor']['after'][0] |\
dict2items) | length==3}}"
- - name: Replaces device configuration of listed data inputs networks configuration with
- provided configuration (IDEMPOTENT)
+ - name:
+ Replaces device configuration of listed data inputs networks configuration with
+ provided configuration (IDEMPOTENT)
register: result
splunk.es.splunk_data_inputs_monitor: *id001
@@ -39,5 +39,4 @@
- result['changed'] == false
always:
-
- include_tasks: _remove_dim_config.yaml
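
The assertion kept above leans on the dict2items | symmetric_difference idiom: each dict becomes a list of key/value pairs, and the symmetric difference keeps the pairs unique to either side, so its length bounds how many settings differ, with zero meaning the dicts match exactly. A standalone sketch, with requested_config as a hypothetical variable:

    - name: Assert the module returned exactly the requested configuration
      ansible.builtin.assert:
        that:
          - >-
            {{ (requested_config | dict2items)
               | symmetric_difference(result['data_inputs_monitor']['after'][0] | dict2items)
               | length == 0 }}
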
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml
index 4025c446c..79904a72b 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml
@@ -6,7 +6,6 @@
- include_tasks: _populate_dim_config.yaml
- block:
-
- name: Apply the provided configuration (base config)
register: base_config
splunk.es.splunk_data_inputs_monitor: &id001
@@ -14,17 +13,17 @@
config:
- name: "/var/log"
blacklist: '/\/var\/log\/[a-z]/gm'
- check_index: True
- check_path: True
+ check_index: true
+ check_path: true
crc_salt: <SOURCE>
- disabled: False
- follow_tail: False
+ disabled: false
+ follow_tail: false
host: "$decideOnStartup"
host_regex: "/(test_host)/gm"
host_segment: 3
ignore_older_than: 5d
index: default
- recursive: True
+ recursive: true
rename_source: test
sourcetype: test_source_type
time_before_close: 4
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml
index 881a750b4..87747ada4 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml
@@ -1,46 +1,45 @@
---
merged:
before: []
-
after:
- name: "/var/log"
- blacklist: '/\/var\/log\/[a-z]/gm'
- check_index: True
- check_path: True
+ name: /var/log
+ blacklist: /\/var\/log\/[a-z]/gm
+ check_index: true
+ check_path: true
crc_salt: <SOURCE>
- disabled: False
- follow_tail: False
- host: "$decideOnStartup"
- host_regex: "/(test_host)/gm"
+ disabled: false
+ follow_tail: false
+ host: $decideOnStartup
+ host_regex: /(test_host)/gm
host_segment: 3
ignore_older_than: 5d
index: default
- recursive: True
+ recursive: true
rename_source: test
sourcetype: test_source_type
time_before_close:
- whitelist: '/\/var\/log\/[a-z]/gm'
+ whitelist: /\/var\/log\/[a-z]/gm
replaced:
before:
- name: "/var/log"
- blacklist: '/\/var\/log\/[a-z]/gm'
- check_index: True
- check_path: True
+ name: /var/log
+ blacklist: /\/var\/log\/[a-z]/gm
+ check_index: true
+ check_path: true
crc_salt: <SOURCE>
- disabled: False
- follow_tail: False
- host: "$decideOnStartup"
- host_regex: "/(test_host)/gm"
+ disabled: false
+ follow_tail: false
+ host: $decideOnStartup
+ host_regex: /(test_host)/gm
host_segment: 3
ignore_older_than: 5d
index: default
- recursive: True
+ recursive: true
rename_source: test
sourcetype: test_source_type
time_before_close:
- whitelist: '/\/var\/log\/[a-z]/gm'
+ whitelist: /\/var\/log\/[a-z]/gm
after:
- name: "/var/log"
- blacklist: '/\/var\/log\/[a-z0-9]/gm'
-    crc_salt: <SOURCE>
\ No newline at end of file
+ name: /var/log
+ blacklist: /\/var\/log\/[a-z0-9]/gm
+ crc_salt: <SOURCE>
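
The quoting churn in this vars file follows one rule: quotes that do not change how a scalar parses are dropped, and quotes that do are kept. A short sketch mixing keys from the fixtures in this diff:

    name: "8099"            # quoted on purpose: a bare 8099 would parse as an integer
    host: $decideOnStartup  # plain scalar, the quotes were redundant
    blacklist: /\/var\/log\/[a-z]/gm  # safe unquoted: nothing here is YAML syntax
    recursive: true         # booleans stay lower-case per the truthy rule
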
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml
index 10c0fabcb..5f709c5aa 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml
@@ -1,2 +1,2 @@
---
-testcase: '*'
+testcase: "*"
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml
index dcc81f25f..c8ca9a326 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml
@@ -1,18 +1,19 @@
---
-- name: collect all test cases
- find:
- paths: '{{ role_path }}/tests'
- patterns: '{{ testcase }}.yaml'
+- name: Collect all test cases
+ ansible.builtin.find:
+ paths: "{{ role_path }}/tests"
+ patterns: "{{ testcase }}.yaml"
register: test_cases
-- name: set test_items
- set_fact: test_items="{{ test_cases.files | map(attribute='path') | list }}"
+- name: Set test_items
+ ansible.builtin.set_fact:
+ test_items: "{{ test_cases.files | map(attribute='path') | list }}"
- name: Run test case (connection=ansible.netcommon.httpapi)
- include: '{{ test_case_to_run }}'
+ ansible.builtin.include_tasks: "{{ test_case_to_run }}"
vars:
ansible_connection: ansible.netcommon.httpapi
- with_items: '{{ test_items }}'
+ with_items: "{{ test_items }}"
loop_control:
loop_var: test_case_to_run
tags: connection_httpapi
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml
index 62cc1ae1e..098fa1e5a 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml
@@ -1,7 +1,7 @@
---
-- include: cli.yaml
+- ansible.builtin.include: cli.yaml
tags:
- cli
-- include: redirection.yaml
+- ansible.builtin.include: redirection.yaml
when: ansible_version.full is version('2.10.0', '>=')
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml
index bafc23a45..6bab72a07 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml
@@ -1,6 +1,6 @@
---
-- name: collect all test cases
- find:
- paths: '{{ role_path }}/tests/redirection'
- patterns: '{{ testcase }}.yaml'
+- name: Collect all test cases
+ ansible.builtin.find:
+ paths: "{{ role_path }}/tests/redirection"
+ patterns: "{{ testcase }}.yaml"
register: test_cases
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml
index 60f87afbf..9fb608dd4 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml
@@ -6,7 +6,7 @@
datatype: raw
name: 8100
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
index: default
queue: parsingQueue
@@ -18,7 +18,7 @@
datatype: cooked
name: 8101
connection_host: ip
- disabled: False
+ disabled: false
host: "$decideOnStartup"
restrict_to_host: default
- protocol: tcp
@@ -31,13 +31,13 @@
- protocol: udp
name: 7890
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
index: default
- no_appending_timestamp: True
- no_priority_stripping: True
+ no_appending_timestamp: true
+ no_priority_stripping: true
queue: parsingQueue
restrict_to_host: default
source: test_source
sourcetype: test_source_type
-    state: merged
\ No newline at end of file
+ state: merged
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml
index bf904c27d..62c42c5e7 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml
@@ -13,4 +13,4 @@
name: test_token
- protocol: udp
name: default:7890
-    state: deleted
\ No newline at end of file
+ state: deleted
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml
index 842524ec6..2fcdaa9db 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml
@@ -17,7 +17,7 @@
datatype: raw
name: 8100
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
index: default
queue: parsingQueue
@@ -29,7 +29,7 @@
datatype: cooked
name: 8101
connection_host: ip
- disabled: False
+ disabled: false
host: "$decideOnStartup"
restrict_to_host: default
- protocol: tcp
@@ -42,11 +42,11 @@
- protocol: udp
name: 7890
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
index: default
- no_appending_timestamp: True
- no_priority_stripping: True
+ no_appending_timestamp: true
+ no_priority_stripping: true
queue: parsingQueue
restrict_to_host: default
source: test_source
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml
index 340df5282..2f261efb9 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml
@@ -17,7 +17,7 @@
datatype: raw
name: 8100
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
index: default
queue: parsingQueue
@@ -29,7 +29,7 @@
datatype: cooked
name: 8101
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
restrict_to_host: default
- protocol: tcp
@@ -39,11 +39,11 @@
- protocol: udp
name: 7890
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
index: default
- no_appending_timestamp: False
- no_priority_stripping: False
+ no_appending_timestamp: false
+ no_priority_stripping: false
queue: parsingQueue
restrict_to_host: default
source: test_source
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml
index 1fa3e577c..4ad739da5 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml
@@ -17,7 +17,7 @@
datatype: raw
name: 8100
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
index: default
queue: parsingQueue
@@ -29,7 +29,7 @@
datatype: cooked
name: 8101
connection_host: ip
- disabled: False
+ disabled: false
host: "$decideOnStartup"
restrict_to_host: default
- protocol: tcp
@@ -39,11 +39,11 @@
- protocol: udp
name: 7890
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
index: default
- no_appending_timestamp: True
- no_priority_stripping: True
+ no_appending_timestamp: true
+ no_priority_stripping: true
queue: parsingQueue
restrict_to_host: default
source: test_source
@@ -74,7 +74,7 @@
datatype: raw
name: 8100
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
index: default
queue: parsingQueue
@@ -86,7 +86,7 @@
datatype: cooked
name: 8101
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
restrict_to_host: default
- protocol: tcp
@@ -96,11 +96,11 @@
- protocol: udp
name: 7890
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
index: default
- no_appending_timestamp: False
- no_priority_stripping: False
+ no_appending_timestamp: false
+ no_priority_stripping: false
queue: parsingQueue
restrict_to_host: default
source: test_source
diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/vars/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/vars/main.yaml
index 942b75851..46b42f7e2 100644
--- a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/vars/main.yaml
+++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/vars/main.yaml
@@ -15,7 +15,7 @@ merged:
datatype: raw
name: default:8100
connection_host: ip
- disabled: True
+ disabled: true
host: $decideOnStartup
index: default
queue: parsingQueue
@@ -27,13 +27,13 @@ merged:
datatype: cooked
name: default:8101
connection_host: ip
- disabled: False
+ disabled: false
host: $decideOnStartup
restrict_to_host: default
- protocol: tcp
datatype: splunktcptoken
name: splunktcptoken://test_token
- token: 01234567-0123-0123-0123-012345678901
+ token: "01234567-0123-0123-0123-012345678901"
- protocol: tcp
datatype: ssl
name: test_host
@@ -44,11 +44,11 @@ merged:
- protocol: udp
name: default:7890
connection_host: ip
- disabled: True
+ disabled: true
host: $decideOnStartup
index: default
- no_appending_timestamp: True
- no_priority_stripping: True
+ no_appending_timestamp: true
+ no_priority_stripping: true
queue: parsingQueue
restrict_to_host: default
source: test_source
@@ -60,7 +60,7 @@ replaced:
datatype: raw
name: default:8100
connection_host: ip
- disabled: True
+ disabled: true
host: $decideOnStartup
index: default
queue: parsingQueue
@@ -72,21 +72,21 @@ replaced:
datatype: cooked
name: default:8101
connection_host: ip
- disabled: False
+ disabled: false
host: $decideOnStartup
restrict_to_host: default
- protocol: tcp
datatype: splunktcptoken
name: splunktcptoken://test_token
- token: 01234567-0123-0123-0123-012345678901
+ token: "01234567-0123-0123-0123-012345678901"
- protocol: udp
name: default:7890
connection_host: ip
- disabled: True
+ disabled: true
host: $decideOnStartup
index: default
- no_appending_timestamp: True
- no_priority_stripping: True
+ no_appending_timestamp: true
+ no_priority_stripping: true
queue: parsingQueue
restrict_to_host: default
source: test_source
@@ -96,7 +96,7 @@ replaced:
datatype: raw
name: default:8100
connection_host: ip
- disabled: True
+ disabled: true
host: $decideOnStartup
index: default
queue: parsingQueue
@@ -108,21 +108,21 @@ replaced:
datatype: cooked
name: default:8101
connection_host: ip
- disabled: True
+ disabled: true
host: $decideOnStartup
restrict_to_host: default
- protocol: tcp
datatype: splunktcptoken
name: splunktcptoken://test_token
- token: 01234567-0123-0123-0123-012345678900
+ token: "01234567-0123-0123-0123-012345678900"
- protocol: udp
name: default:7890
connection_host: ip
- disabled: True
+ disabled: true
host: $decideOnStartup
index: default
- no_appending_timestamp: False
- no_priority_stripping: False
+ no_appending_timestamp: false
+ no_priority_stripping: false
queue: parsingQueue
restrict_to_host: default
source: test_source
diff --git a/ansible_collections/splunk/es/tests/sanity/ignore-2.12.txt b/ansible_collections/splunk/es/tests/sanity/ignore-2.12.txt
new file mode 100644
index 000000000..0cd1efee7
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/sanity/ignore-2.12.txt
@@ -0,0 +1 @@
+tests/unit/mock/loader.py pylint:arguments-renamed
diff --git a/ansible_collections/splunk/es/tests/sanity/ignore-2.13.txt b/ansible_collections/splunk/es/tests/sanity/ignore-2.13.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/sanity/ignore-2.13.txt
diff --git a/ansible_collections/splunk/es/tests/sanity/ignore-2.14.txt b/ansible_collections/splunk/es/tests/sanity/ignore-2.14.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/sanity/ignore-2.14.txt
diff --git a/ansible_collections/splunk/es/tests/sanity/ignore-2.15.txt b/ansible_collections/splunk/es/tests/sanity/ignore-2.15.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/sanity/ignore-2.15.txt
diff --git a/ansible_collections/splunk/es/tests/sanity/ignore-2.16.txt b/ansible_collections/splunk/es/tests/sanity/ignore-2.16.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/sanity/ignore-2.16.txt
diff --git a/ansible_collections/splunk/es/tests/sanity/ignore-2.17.txt b/ansible_collections/splunk/es/tests/sanity/ignore-2.17.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/splunk/es/tests/sanity/ignore-2.17.txt
diff --git a/ansible_collections/splunk/es/tests/sanity/ignore-2.9.txt b/ansible_collections/splunk/es/tests/sanity/ignore-2.9.txt
index ed0da94eb..16dfcae4d 100644
--- a/ansible_collections/splunk/es/tests/sanity/ignore-2.9.txt
+++ b/ansible_collections/splunk/es/tests/sanity/ignore-2.9.txt
@@ -6,4 +6,4 @@ plugins/modules/data_input_monitor.py validate-modules:invalid-documentation
plugins/modules/data_input_network.py validate-modules:deprecation-mismatch
plugins/modules/data_input_network.py validate-modules:invalid-documentation
plugins/modules/adaptive_response_notable_event.py validate-modules:deprecation-mismatch
-plugins/modules/adaptive_response_notable_event.py validate-modules:invalid-documentation
\ No newline at end of file
+plugins/modules/adaptive_response_notable_event.py validate-modules:invalid-documentation
diff --git a/ansible_collections/splunk/es/tests/unit/compat/builtins.py b/ansible_collections/splunk/es/tests/unit/compat/builtins.py
deleted file mode 100644
index bfc8adfbe..000000000
--- a/ansible_collections/splunk/es/tests/unit/compat/builtins.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# Make coding more python3-ish
-from __future__ import absolute_import, division, print_function
-
-__metaclass__ = type
-
-#
-# Compat for python2.7
-#
-
-# One unittest needs to import builtins via __import__() so we need to have
-# the string that represents it
-try:
- import __builtin__
-except ImportError:
- BUILTINS = "builtins"
-else:
- BUILTINS = "__builtin__"
diff --git a/ansible_collections/splunk/es/tests/unit/compat/mock.py b/ansible_collections/splunk/es/tests/unit/compat/mock.py
index 2ea98a17f..61ac88700 100644
--- a/ansible_collections/splunk/es/tests/unit/compat/mock.py
+++ b/ansible_collections/splunk/es/tests/unit/compat/mock.py
@@ -19,6 +19,7 @@
# Make coding more python3-ish
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
"""
@@ -26,6 +27,7 @@ Compat module for Python3.x's unittest.mock module
"""
import sys
+
# Python 2.7
# Note: Could use the pypi mock library on python3.x as well as python2.x. It
@@ -104,7 +106,7 @@ if sys.version_info >= (3,) and sys.version_info < (3, 4, 4):
import _io
file_spec = list(
- set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO)))
+ set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))),
)
if mock is None:
diff --git a/ansible_collections/splunk/es/tests/unit/compat/unittest.py b/ansible_collections/splunk/es/tests/unit/compat/unittest.py
index df3379b82..df4266ec9 100644
--- a/ansible_collections/splunk/es/tests/unit/compat/unittest.py
+++ b/ansible_collections/splunk/es/tests/unit/compat/unittest.py
@@ -18,6 +18,7 @@
# Make coding more python3-ish
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
"""
@@ -26,6 +27,7 @@ Compat module for Python2.7's unittest module
import sys
+
# Allow wildcard import because we really do want to import all of
# unittests's symbols into this compat shim
# pylint: disable=wildcard-import,unused-wildcard-import
diff --git a/ansible_collections/splunk/es/tests/unit/mock/loader.py b/ansible_collections/splunk/es/tests/unit/mock/loader.py
index 19c44a7e8..011c67b29 100644
--- a/ansible_collections/splunk/es/tests/unit/mock/loader.py
+++ b/ansible_collections/splunk/es/tests/unit/mock/loader.py
@@ -18,19 +18,20 @@
# Make coding more python3-ish
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import os
from ansible.errors import AnsibleParserError
-from ansible.parsing.dataloader import DataLoader
from ansible.module_utils._text import to_bytes, to_text
+from ansible.parsing.dataloader import DataLoader
class DictDataLoader(DataLoader):
def __init__(self, file_mapping=None):
file_mapping = {} if file_mapping is None else file_mapping
- assert type(file_mapping) == dict
+ assert isinstance(file_mapping, dict)
super(DictDataLoader, self).__init__()
diff --git a/ansible_collections/splunk/es/tests/unit/mock/path.py b/ansible_collections/splunk/es/tests/unit/mock/path.py
index 1e5902864..06f1a3d56 100644
--- a/ansible_collections/splunk/es/tests/unit/mock/path.py
+++ b/ansible_collections/splunk/es/tests/unit/mock/path.py
@@ -1,12 +1,13 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-from ansible_collections.trendmicro.deepsec.tests.unit.compat.mock import (
- MagicMock,
-)
from ansible.utils.path import unfrackpath
+from ansible_collections.splunk.es.tests.unit.compat.mock import MagicMock
+
mock_unfrackpath_noop = MagicMock(
- spec_set=unfrackpath, side_effect=lambda x, *args, **kwargs: x
+ spec_set=unfrackpath,
+ side_effect=lambda x, *args, **kwargs: x,
)
diff --git a/ansible_collections/splunk/es/tests/unit/mock/procenv.py b/ansible_collections/splunk/es/tests/unit/mock/procenv.py
index f7ab5fe91..3699c6308 100644
--- a/ansible_collections/splunk/es/tests/unit/mock/procenv.py
+++ b/ansible_collections/splunk/es/tests/unit/mock/procenv.py
@@ -19,16 +19,19 @@
# Make coding more python3-ish
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-import sys
import json
+import sys
from contextlib import contextmanager
from io import BytesIO, StringIO
-from ansible_collections.trendmicro.deepsec.tests.unit.compat import unittest
-from ansible.module_utils.six import PY3
+
from ansible.module_utils._text import to_bytes
+from ansible.module_utils.six import PY3
+
+from ansible_collections.splunk.es.tests.unit.compat import unittest
@contextmanager
diff --git a/ansible_collections/splunk/es/tests/unit/mock/vault_helper.py b/ansible_collections/splunk/es/tests/unit/mock/vault_helper.py
index b34ae1340..82d01f5c5 100644
--- a/ansible_collections/splunk/es/tests/unit/mock/vault_helper.py
+++ b/ansible_collections/splunk/es/tests/unit/mock/vault_helper.py
@@ -14,10 +14,10 @@
# Make coding more python3-ish
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
from ansible.module_utils._text import to_bytes
-
from ansible.parsing.vault import VaultSecret
@@ -38,5 +38,7 @@ class TextVaultSecret(VaultSecret):
def bytes(self):
"""The text encoded with encoding, unless we specifically set _bytes."""
return self._bytes or to_bytes(
- self.text, encoding=self.encoding, errors=self.errors
+ self.text,
+ encoding=self.encoding,
+ errors=self.errors,
)
diff --git a/ansible_collections/splunk/es/tests/unit/mock/yaml_helper.py b/ansible_collections/splunk/es/tests/unit/mock/yaml_helper.py
index 5df30aaed..e46d3180b 100644
--- a/ansible_collections/splunk/es/tests/unit/mock/yaml_helper.py
+++ b/ansible_collections/splunk/es/tests/unit/mock/yaml_helper.py
@@ -1,12 +1,14 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import io
+
import yaml
from ansible.module_utils.six import PY3
-from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing.yaml.dumper import AnsibleDumper
+from ansible.parsing.yaml.loader import AnsibleLoader
class YamlTestUtils(object):
@@ -45,7 +47,8 @@ class YamlTestUtils(object):
# dump the gen 2 objects directory to strings
string_from_object_dump_2 = self._dump_string(
- obj_2, dumper=AnsibleDumper
+ obj_2,
+ dumper=AnsibleDumper,
)
# The gen 1 and gen 2 yaml strings
@@ -59,7 +62,8 @@ class YamlTestUtils(object):
obj_3 = loader_3.get_data()
string_from_object_dump_3 = self._dump_string(
- obj_3, dumper=AnsibleDumper
+ obj_3,
+ dumper=AnsibleDumper,
)
self.assertEqual(obj, obj_3)
@@ -93,10 +97,14 @@ class YamlTestUtils(object):
if PY3:
yaml.dump(
- obj_from_stream, stream_obj_from_stream, Dumper=AnsibleDumper
+ obj_from_stream,
+ stream_obj_from_stream,
+ Dumper=AnsibleDumper,
)
yaml.dump(
- obj_from_stream, stream_obj_from_string, Dumper=AnsibleDumper
+ obj_from_stream,
+ stream_obj_from_string,
+ Dumper=AnsibleDumper,
)
else:
yaml.dump(
@@ -120,25 +128,27 @@ class YamlTestUtils(object):
if PY3:
yaml_string_obj_from_stream = yaml.dump(
- obj_from_stream, Dumper=AnsibleDumper
+ obj_from_stream,
+ Dumper=AnsibleDumper,
)
yaml_string_obj_from_string = yaml.dump(
- obj_from_string, Dumper=AnsibleDumper
+ obj_from_string,
+ Dumper=AnsibleDumper,
)
else:
yaml_string_obj_from_stream = yaml.dump(
- obj_from_stream, Dumper=AnsibleDumper, encoding=None
+ obj_from_stream,
+ Dumper=AnsibleDumper,
+ encoding=None,
)
yaml_string_obj_from_string = yaml.dump(
- obj_from_string, Dumper=AnsibleDumper, encoding=None
+ obj_from_string,
+ Dumper=AnsibleDumper,
+ encoding=None,
)
assert yaml_string == yaml_string_obj_from_stream
- assert (
- yaml_string
- == yaml_string_obj_from_stream
- == yaml_string_obj_from_string
- )
+ assert yaml_string == yaml_string_obj_from_stream == yaml_string_obj_from_string
assert (
yaml_string
== yaml_string_obj_from_stream
diff --git a/ansible_collections/splunk/es/tests/unit/modules/conftest.py b/ansible_collections/splunk/es/tests/unit/modules/conftest.py
index e19a1e04c..349e71ada 100644
--- a/ansible_collections/splunk/es/tests/unit/modules/conftest.py
+++ b/ansible_collections/splunk/es/tests/unit/modules/conftest.py
@@ -2,15 +2,16 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import json
import pytest
-from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes
from ansible.module_utils.common._collections_compat import MutableMapping
+from ansible.module_utils.six import string_types
@pytest.fixture
@@ -21,20 +22,13 @@ def patch_ansible_module(request, mocker):
if "ANSIBLE_MODULE_ARGS" not in request.param:
request.param = {"ANSIBLE_MODULE_ARGS": request.param}
if "_ansible_remote_tmp" not in request.param["ANSIBLE_MODULE_ARGS"]:
- request.param["ANSIBLE_MODULE_ARGS"][
- "_ansible_remote_tmp"
- ] = "/tmp"
- if (
- "_ansible_keep_remote_files"
- not in request.param["ANSIBLE_MODULE_ARGS"]
- ):
- request.param["ANSIBLE_MODULE_ARGS"][
- "_ansible_keep_remote_files"
- ] = False
+ request.param["ANSIBLE_MODULE_ARGS"]["_ansible_remote_tmp"] = "/tmp"
+ if "_ansible_keep_remote_files" not in request.param["ANSIBLE_MODULE_ARGS"]:
+ request.param["ANSIBLE_MODULE_ARGS"]["_ansible_keep_remote_files"] = False
args = json.dumps(request.param)
else:
raise Exception(
- "Malformed data to the patch_ansible_module pytest fixture"
+ "Malformed data to the patch_ansible_module pytest fixture",
)
mocker.patch("ansible.module_utils.basic._ANSIBLE_ARGS", to_bytes(args))
diff --git a/ansible_collections/splunk/es/tests/unit/modules/utils.py b/ansible_collections/splunk/es/tests/unit/modules/utils.py
index d55afc0b3..923594463 100644
--- a/ansible_collections/splunk/es/tests/unit/modules/utils.py
+++ b/ansible_collections/splunk/es/tests/unit/modules/utils.py
@@ -1,13 +1,15 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import json
-from ansible_collections.trendmicro.deepsec.tests.unit.compat import unittest
-from ansible_collections.trendmicro.deepsec.tests.unit.compat.mock import patch
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
+from ansible_collections.splunk.es.tests.unit.compat import unittest
+from ansible_collections.splunk.es.tests.unit.compat.mock import patch
+
def set_module_args(args):
if "_ansible_remote_tmp" not in args:
@@ -41,7 +43,9 @@ def fail_json(*args, **kwargs):
class ModuleTestCase(unittest.TestCase):
def setUp(self):
self.mock_module = patch.multiple(
- basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json
+ basic.AnsibleModule,
+ exit_json=exit_json,
+ fail_json=fail_json,
)
self.mock_module.start()
self.mock_sleep = patch("time.sleep")
diff --git a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_adaptive_response_notable_events.py b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_adaptive_response_notable_events.py
index b6a84fc78..96993c6dc 100644
--- a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_adaptive_response_notable_events.py
+++ b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_adaptive_response_notable_events.py
@@ -18,27 +18,27 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
from ansible.module_utils.six import PY2
+
builtin_import = "builtins.__import__"
if PY2:
builtin_import = "__builtin__.__import__"
import tempfile
+
from ansible.playbook.task import Task
from ansible.template import Templar
+from ansible_collections.ansible.utils.tests.unit.compat.mock import MagicMock, patch
+
from ansible_collections.splunk.es.plugins.action.splunk_adaptive_response_notable_events import (
ActionModule,
)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
-from ansible_collections.ansible.utils.tests.unit.compat.mock import (
- MagicMock,
- patch,
-)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
+
RESPONSE_PAYLOAD = [
{
@@ -68,8 +68,8 @@ RESPONSE_PAYLOAD = [
"actions": "notable",
},
"name": "Ansible Test",
- }
- ]
+ },
+ ],
},
{
"entry": [
@@ -97,8 +97,8 @@ RESPONSE_PAYLOAD = [
"actions": "notable",
},
"name": "Ansible Test",
- }
- ]
+ },
+ ],
},
]
@@ -153,7 +153,7 @@ REQUEST_PAYLOAD = [
class TestSplunkEsAdaptiveResponseNotableEvents:
- def setup(self):
+ def setup_method(self):
task = MagicMock(Task)
# Ansible > 2.13 looks for check_mode in task
task.check_mode = False
@@ -161,7 +161,7 @@ class TestSplunkEsAdaptiveResponseNotableEvents:
# Ansible <= 2.13 looks for check_mode in play_context
play_context.check_mode = False
connection = patch(
- "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection"
+ "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection",
)
connection._socket_path = tempfile.NamedTemporaryFile().name
fake_loader = {}
@@ -186,8 +186,39 @@ class TestSplunkEsAdaptiveResponseNotableEvents:
}
@patch("ansible.module_utils.connection.Connection.__rpc__")
+ def test_es_adaptive_response_notable_events_merged_idempotent(
+ self,
+ conn,
+ monkeypatch,
+ ):
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
+ self._plugin._connection._shell = MagicMock()
+
+ def create_update(self, rest_path, data=None):
+ return RESPONSE_PAYLOAD[0]
+
+ def get_by_path(self, path):
+ return RESPONSE_PAYLOAD[0]
+
+ monkeypatch.setattr(SplunkRequest, "create_update", create_update)
+ monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
+
+ self._plugin._task.args = {
+ "state": "merged",
+ "config": [REQUEST_PAYLOAD[0]],
+ }
+ result = self._plugin.run(task_vars=self._task_vars)
+ # recheck with module
+ assert (
+ result["adaptive_response_notable_events"]["before"][0]["correlation_search_name"]
+ == "Ansible Test"
+ )
+
+ @patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_adaptive_response_notable_events_merged_01(
- self, connection, monkeypatch
+ self,
+ connection,
+ monkeypatch,
):
metadata = {
"search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent'
@@ -205,9 +236,7 @@ class TestSplunkEsAdaptiveResponseNotableEvents:
monkeypatch.setattr(SplunkRequest, "create_update", create_update)
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
self._plugin._task.args = {
"state": "merged",
@@ -218,7 +247,9 @@ class TestSplunkEsAdaptiveResponseNotableEvents:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_adaptive_response_notable_events_merged_02(
- self, connection, monkeypatch
+ self,
+ connection,
+ monkeypatch,
):
self._plugin.api_response = RESPONSE_PAYLOAD[0]
self._plugin.search_for_resource_name = MagicMock()
@@ -232,9 +263,7 @@ class TestSplunkEsAdaptiveResponseNotableEvents:
monkeypatch.setattr(SplunkRequest, "create_update", create_update)
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
self._plugin._task.args = {
"state": "merged",
@@ -245,37 +274,12 @@ class TestSplunkEsAdaptiveResponseNotableEvents:
assert result["changed"] is True
@patch("ansible.module_utils.connection.Connection.__rpc__")
- def test_es_adaptive_response_notable_events_merged_idempotent(
- self, conn, monkeypatch
- ):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
- self._plugin._connection._shell = MagicMock()
-
- def create_update(self, rest_path, data=None):
- return RESPONSE_PAYLOAD[0]
-
- def get_by_path(self, path):
- return RESPONSE_PAYLOAD[0]
-
- monkeypatch.setattr(SplunkRequest, "create_update", create_update)
- monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
-
- self._plugin._task.args = {
- "state": "merged",
- "config": [REQUEST_PAYLOAD[0]],
- }
- result = self._plugin.run(task_vars=self._task_vars)
- assert result["changed"] is False
-
- @patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_adaptive_response_notable_events_replaced_01(
- self, conn, monkeypatch
+ self,
+ conn,
+ monkeypatch,
):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
self._plugin.search_for_resource_name = MagicMock()
self._plugin.search_for_resource_name.return_value = (
@@ -305,11 +309,11 @@ class TestSplunkEsAdaptiveResponseNotableEvents:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_adaptive_response_notable_events_replaced_02(
- self, conn, monkeypatch
+ self,
+ conn,
+ monkeypatch,
):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
self._plugin.search_for_resource_name = MagicMock()
self._plugin.search_for_resource_name.return_value = (
@@ -339,11 +343,11 @@ class TestSplunkEsAdaptiveResponseNotableEvents:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_adaptive_response_notable_events_replaced_idempotent(
- self, conn, monkeypatch
+ self,
+ conn,
+ monkeypatch,
):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
def create_update(self, rest_path, data=None):
@@ -360,16 +364,18 @@ class TestSplunkEsAdaptiveResponseNotableEvents:
"config": [REQUEST_PAYLOAD[0]],
}
result = self._plugin.run(task_vars=self._task_vars)
-
- assert result["changed"] is False
+ assert (
+ result["adaptive_response_notable_events"]["before"][0]["correlation_search_name"]
+ == "Ansible Test"
+ )
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_adaptive_response_notable_events_deleted(
- self, conn, monkeypatch
+ self,
+ conn,
+ monkeypatch,
):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
self._plugin.search_for_resource_name = MagicMock()
@@ -388,7 +394,7 @@ class TestSplunkEsAdaptiveResponseNotableEvents:
"config": [
{
"correlation_search_name": "Ansible Test",
- }
+ },
],
}
result = self._plugin.run(task_vars=self._task_vars)
@@ -397,11 +403,10 @@ class TestSplunkEsAdaptiveResponseNotableEvents:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_adaptive_response_notable_events_deleted_idempotent(
- self, connection
+ self,
+ connection,
):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
self._plugin.search_for_resource_name = MagicMock()
self._plugin.search_for_resource_name.return_value = {}, {}
@@ -411,7 +416,7 @@ class TestSplunkEsAdaptiveResponseNotableEvents:
"config": [
{
"correlation_search_name": "Ansible Test",
- }
+ },
],
}
result = self._plugin.run(task_vars=self._task_vars)
@@ -419,11 +424,11 @@ class TestSplunkEsAdaptiveResponseNotableEvents:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_adaptive_response_notable_events_gathered(
- self, conn, monkeypatch
+ self,
+ conn,
+ monkeypatch,
):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
self._plugin.search_for_resource_name = MagicMock()
self._plugin.search_for_resource_name.return_value = (
@@ -436,7 +441,7 @@ class TestSplunkEsAdaptiveResponseNotableEvents:
"config": [
{
"correlation_search_name": "Ansible Test",
- }
+ },
],
}
result = self._plugin.run(task_vars=self._task_vars)
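
Throughout these test files the nose-style setup() hook is renamed to setup_method(), tracking pytest's removal of the nose compatibility layer; setup_method() is the supported xunit-style per-test hook. A minimal sketch of the convention, assuming pytest as the runner:

from unittest.mock import MagicMock


class TestExample:
    def setup_method(self):
        # invoked by pytest before every test method in this class
        self.plugin = MagicMock()
        self.plugin.run.return_value = {"changed": False}

    def test_run_reports_no_change(self):
        assert self.plugin.run()["changed"] is False
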
diff --git a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_correlation_searches.py b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_correlation_searches.py
index fca268c98..92e994747 100644
--- a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_correlation_searches.py
+++ b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_correlation_searches.py
@@ -18,27 +18,25 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
from ansible.module_utils.six import PY2
+
builtin_import = "builtins.__import__"
if PY2:
builtin_import = "__builtin__.__import__"
import tempfile
+
from ansible.playbook.task import Task
from ansible.template import Templar
-from ansible_collections.splunk.es.plugins.action.splunk_correlation_searches import (
- ActionModule,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
-from ansible_collections.ansible.utils.tests.unit.compat.mock import (
- MagicMock,
- patch,
-)
+from ansible_collections.ansible.utils.tests.unit.compat.mock import MagicMock, patch
+
+from ansible_collections.splunk.es.plugins.action.splunk_correlation_searches import ActionModule
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
+
RESPONSE_PAYLOAD = {
"entry": [
@@ -73,8 +71,8 @@ RESPONSE_PAYLOAD = {
'n.src" as "src" | where "count">=6',
},
"name": "Ansible Test",
- }
- ]
+ },
+ ],
}
REQUEST_PAYLOAD = [
@@ -92,7 +90,7 @@ REQUEST_PAYLOAD = [
{
"framework": "test_framework",
"custom_annotations": ["test5"],
- }
+ },
],
},
"ui_dispatch_context": "SplunkEnterpriseSecuritySuite",
@@ -128,7 +126,7 @@ REQUEST_PAYLOAD = [
{
"framework": "test_framework2",
"custom_annotations": ["test9", "test10"],
- }
+ },
],
},
"ui_dispatch_context": "SplunkEnterpriseSecuritySuite",
@@ -154,7 +152,7 @@ REQUEST_PAYLOAD = [
class TestSplunkEsCorrelationSearches:
- def setup(self):
+ def setup_method(self):
task = MagicMock(Task)
# Ansible > 2.13 looks for check_mode in task
task.check_mode = False
@@ -162,9 +160,9 @@ class TestSplunkEsCorrelationSearches:
# Ansible <= 2.13 looks for check_mode in play_context
play_context.check_mode = False
connection = patch(
- "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection"
+ "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection",
)
     connection._socket_path = tempfile.NamedTemporaryFile().name
fake_loader = {}
templar = Templar(loader=fake_loader)
self._plugin = ActionModule(
@@ -190,9 +188,7 @@ class TestSplunkEsCorrelationSearches:
monkeypatch.setattr(SplunkRequest, "create_update", create_update)
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
self._plugin._task.args = {
"state": "merged",
@@ -203,11 +199,11 @@ class TestSplunkEsCorrelationSearches:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_correlation_searches_merged_idempotent(
- self, conn, monkeypatch
+ self,
+ conn,
+ monkeypatch,
):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
def create_update(self, rest_path, data=None):
@@ -224,13 +220,12 @@ class TestSplunkEsCorrelationSearches:
"config": [REQUEST_PAYLOAD[0]],
}
result = self._plugin.run(task_vars=self._task_vars)
+ # recheck with module
assert result["changed"] is False
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_correlation_searches_replaced_01(self, conn, monkeypatch):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
self._plugin.search_for_resource_name = MagicMock()
self._plugin.search_for_resource_name.return_value = RESPONSE_PAYLOAD
@@ -257,9 +252,7 @@ class TestSplunkEsCorrelationSearches:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_correlation_searches_replaced_02(self, conn, monkeypatch):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
self._plugin.search_for_resource_name = MagicMock()
self._plugin.search_for_resource_name.return_value = RESPONSE_PAYLOAD
@@ -286,11 +279,11 @@ class TestSplunkEsCorrelationSearches:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_correlation_searches_replaced_idempotent(
- self, conn, monkeypatch
+ self,
+ conn,
+ monkeypatch,
):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
def create_update(self, rest_path, data=None):
@@ -312,13 +305,11 @@ class TestSplunkEsCorrelationSearches:
}
result = self._plugin.run(task_vars=self._task_vars)
- assert result["changed"] is False
+ assert result["changed"] is True
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_correlation_searches_deleted(self, conn, monkeypatch):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
def get_by_path(self, path):
@@ -342,9 +333,7 @@ class TestSplunkEsCorrelationSearches:
self._plugin.search_for_resource_name = MagicMock()
self._plugin.search_for_resource_name.return_value = {}
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
self._plugin._task.args = {
"state": "deleted",
@@ -355,9 +344,7 @@ class TestSplunkEsCorrelationSearches:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_correlation_searches_gathered(self, conn, monkeypatch):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
def get_by_path(self, path):
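
The stub functions above work because monkeypatch.setattr() on a class replaces the attribute for every instance, and a plain function assigned to a class becomes an ordinary method, so its first parameter receives the instance. A self-contained sketch (FakeRequest is hypothetical, standing in for SplunkRequest):

class FakeRequest:
    def get_by_path(self, path):
        raise RuntimeError("would hit the Splunk REST API")


def test_class_level_stub(monkeypatch):
    def fake_get_by_path(self, path):
        # `self` here is the FakeRequest instance created below
        return {"entry": []}

    monkeypatch.setattr(FakeRequest, "get_by_path", fake_get_by_path)
    assert FakeRequest().get_by_path("services/anything") == {"entry": []}
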
diff --git a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_monitors.py b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_monitors.py
index 068fe638d..c6b07baae 100644
--- a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_monitors.py
+++ b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_monitors.py
@@ -18,27 +18,25 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
from ansible.module_utils.six import PY2
+
builtin_import = "builtins.__import__"
if PY2:
builtin_import = "__builtin__.__import__"
import tempfile
+
from ansible.playbook.task import Task
from ansible.template import Templar
-from ansible_collections.splunk.es.plugins.action.splunk_data_inputs_monitor import (
- ActionModule,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
-from ansible_collections.ansible.utils.tests.unit.compat.mock import (
- MagicMock,
- patch,
-)
+from ansible_collections.ansible.utils.tests.unit.compat.mock import MagicMock, patch
+
+from ansible_collections.splunk.es.plugins.action.splunk_data_inputs_monitor import ActionModule
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
+
RESPONSE_PAYLOAD = {
"entry": [
@@ -66,8 +64,8 @@ RESPONSE_PAYLOAD = {
"whitelist": "//var/log/[0-9]/gm",
},
"name": "/var/log",
- }
- ]
+ },
+ ],
}
REQUEST_PAYLOAD = [
@@ -99,7 +97,7 @@ REQUEST_PAYLOAD = [
class TestSplunkEsDataInputsMonitorRules:
- def setup(self):
+ def setup_method(self):
task = MagicMock(Task)
# Ansible > 2.13 looks for check_mode in task
task.check_mode = False
@@ -107,7 +105,7 @@ class TestSplunkEsDataInputsMonitorRules:
# Ansible <= 2.13 looks for check_mode in play_context
play_context.check_mode = False
connection = patch(
- "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection"
+ "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection",
)
connection._socket_path = tempfile.NamedTemporaryFile().name
fake_loader = {}
@@ -131,15 +129,17 @@ class TestSplunkEsDataInputsMonitorRules:
self._plugin.search_for_resource_name.return_value = {}
def create_update(
- self, rest_path, data=None, mock=None, mock_data=None
+ self,
+ rest_path,
+ data=None,
+ mock=None,
+ mock_data=None,
):
return RESPONSE_PAYLOAD
monkeypatch.setattr(SplunkRequest, "create_update", create_update)
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
self._plugin._task.args = {
"state": "merged",
@@ -150,13 +150,15 @@ class TestSplunkEsDataInputsMonitorRules:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_data_inputs_monitor_merged_idempotent(self, conn, monkeypatch):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
def create_update(
- self, rest_path, data=None, mock=None, mock_data=None
+ self,
+ rest_path,
+ data=None,
+ mock=None,
+ mock_data=None,
):
return RESPONSE_PAYLOAD
@@ -182,23 +184,25 @@ class TestSplunkEsDataInputsMonitorRules:
"recursive": True,
"sourcetype": "test_source_type",
"whitelist": "//var/log/[0-9]/gm",
- }
+ },
],
}
result = self._plugin.run(task_vars=self._task_vars)
- assert result["changed"] is False
+ assert result["data_inputs_monitor"]["before"][0]["name"] == "/var/log"
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_data_inputs_monitor_replaced(self, conn, monkeypatch):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
self._plugin.search_for_resource_name = MagicMock()
self._plugin.search_for_resource_name.return_value = RESPONSE_PAYLOAD
def create_update(
- self, rest_path, data=None, mock=None, mock_data=None
+ self,
+ rest_path,
+ data=None,
+ mock=None,
+ mock_data=None,
):
return RESPONSE_PAYLOAD
@@ -220,7 +224,7 @@ class TestSplunkEsDataInputsMonitorRules:
"index": "default",
"name": "/var/log",
"recursive": True,
- }
+ },
],
}
result = self._plugin.run(task_vars=self._task_vars)
@@ -228,15 +232,19 @@ class TestSplunkEsDataInputsMonitorRules:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_data_inputs_monitor_replaced_idempotent(
- self, conn, monkeypatch
+ self,
+ conn,
+ monkeypatch,
):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
def create_update(
- self, rest_path, data=None, mock=None, mock_data=None
+ self,
+ rest_path,
+ data=None,
+ mock=None,
+ mock_data=None,
):
return RESPONSE_PAYLOAD
@@ -267,8 +275,8 @@ class TestSplunkEsDataInputsMonitorRules:
"whitelist": "//var/log/[0-9]/gm",
},
"name": "/var/log",
- }
- ]
+ },
+ ],
}
monkeypatch.setattr(SplunkRequest, "create_update", create_update)
@@ -290,7 +298,7 @@ class TestSplunkEsDataInputsMonitorRules:
"recursive": True,
"sourcetype": "test_source_type",
"whitelist": "//var/log/[0-9]/gm",
- }
+ },
],
}
result = self._plugin.run(task_vars=self._task_vars)
@@ -298,13 +306,15 @@ class TestSplunkEsDataInputsMonitorRules:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_data_inputs_monitor_deleted(self, conn, monkeypatch):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
def create_update(
- self, rest_path, data=None, mock=None, mock_data=None
+ self,
+ rest_path,
+ data=None,
+ mock=None,
+ mock_data=None,
):
return RESPONSE_PAYLOAD
@@ -326,9 +336,7 @@ class TestSplunkEsDataInputsMonitorRules:
self._plugin.search_for_resource_name = MagicMock()
self._plugin.search_for_resource_name.return_value = {}
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
self._plugin._task.args = {
"state": "deleted",
@@ -339,9 +347,7 @@ class TestSplunkEsDataInputsMonitorRules:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_data_inputs_monitor_gathered(self, conn, monkeypatch):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
def get_by_path(self, path):
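
The copy.deepcopy() calls introduced in the next file guard against cross-test pollution: RESPONSE_PAYLOAD and friends are module-level constants, so any in-place normalization performed by the code under test would leak into every later test that reads the same dict. A short sketch of the failure mode being avoided:

import copy

RESPONSE = {"entry": [{"content": {"index": "default"}}]}


def normalize(payload):
    # stands in for plugin code that rewrites the response in place
    payload["entry"][0]["content"]["index"] = "summary"
    return payload


def test_shared_constant_stays_pristine():
    normalize(copy.deepcopy(RESPONSE))
    assert RESPONSE["entry"][0]["content"]["index"] == "default"
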
diff --git a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_network.py b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_network.py
index dbadf9052..c76eb1c4f 100644
--- a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_network.py
+++ b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_network.py
@@ -18,27 +18,26 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
from ansible.module_utils.six import PY2
+
builtin_import = "builtins.__import__"
if PY2:
builtin_import = "__builtin__.__import__"
+import copy
import tempfile
+
from ansible.playbook.task import Task
from ansible.template import Templar
-from ansible_collections.splunk.es.plugins.action.splunk_data_inputs_network import (
- ActionModule,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
-from ansible_collections.ansible.utils.tests.unit.compat.mock import (
- MagicMock,
- patch,
-)
+from ansible_collections.ansible.utils.tests.unit.compat.mock import MagicMock, patch
+
+from ansible_collections.splunk.es.plugins.action.splunk_data_inputs_network import ActionModule
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
+
RESPONSE_PAYLOAD = {
"tcp_cooked": {
@@ -51,7 +50,7 @@ RESPONSE_PAYLOAD = {
"host": "$decideOnStartup",
"restrictToHost": "default",
},
- }
+ },
],
},
"tcp_raw": {
@@ -69,7 +68,7 @@ RESPONSE_PAYLOAD = {
"source": "test_source",
"sourcetype": "test_source_type",
},
- }
+ },
],
},
"udp": {
@@ -88,7 +87,7 @@ RESPONSE_PAYLOAD = {
"source": "test_source",
"sourcetype": "test_source_type",
},
- }
+ },
],
},
"splunktcptoken": {
@@ -98,7 +97,7 @@ RESPONSE_PAYLOAD = {
"content": {
"token": "01234567-0123-0123-0123-012345678901",
},
- }
+ },
],
},
"ssl": {
@@ -106,7 +105,7 @@ RESPONSE_PAYLOAD = {
{
"name": "test_host",
"content": {},
- }
+ },
],
},
}
@@ -173,7 +172,7 @@ REPLACED_RESPONSE_PAYLOAD = {
"host": "$decideOnStartup",
"restrictToHost": "default",
},
- }
+ },
],
},
"tcp_raw": {
@@ -191,7 +190,7 @@ REPLACED_RESPONSE_PAYLOAD = {
"source": "test_source",
"sourcetype": "test_source_type",
},
- }
+ },
],
},
"udp": {
@@ -210,7 +209,7 @@ REPLACED_RESPONSE_PAYLOAD = {
"source": "test_source",
"sourcetype": "test_source_type",
},
- }
+ },
],
},
"splunktcptoken": {
@@ -220,7 +219,7 @@ REPLACED_RESPONSE_PAYLOAD = {
"content": {
"token": "01234567-0123-0123-0123-012345678900",
},
- }
+ },
],
},
}
@@ -273,7 +272,7 @@ REPLACED_REQUEST_PAYLOAD = {
class TestSplunkEsDataInputsNetworksRules:
- def setup(self):
+ def setup_method(self):
task = MagicMock(Task)
# Ansible > 2.13 looks for check_mode in task
task.check_mode = False
@@ -281,7 +280,7 @@ class TestSplunkEsDataInputsNetworksRules:
# Ansible <= 2.13 looks for check_mode in play_context
play_context.check_mode = False
connection = patch(
- "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection"
+ "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection",
)
connection._socket_path = tempfile.NamedTemporaryFile().name
fake_loader = {}
@@ -300,19 +299,19 @@ class TestSplunkEsDataInputsNetworksRules:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_data_inputs_network_merged(self, connection, monkeypatch):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
-
-    # patch update operation
+    # patch get and update operations
-    update_response = RESPONSE_PAYLOAD["tcp_cooked"]
def get_by_path(self, path):
return {}
def create_update(
- self, rest_path, data=None, mock=None, mock_data=None
+ self,
+ rest_path,
+ data=None,
+ mock=None,
+ mock_data=None,
):
return update_response
@@ -320,7 +319,7 @@ class TestSplunkEsDataInputsNetworksRules:
monkeypatch.setattr(SplunkRequest, "create_update", create_update)
# tcp_cooked
- update_response = RESPONSE_PAYLOAD["tcp_cooked"]
+ update_response = copy.deepcopy(RESPONSE_PAYLOAD["tcp_cooked"])
self._plugin._task.args = {
"state": "merged",
"config": [REQUEST_PAYLOAD["tcp_cooked"]],
@@ -329,7 +328,7 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is True
# tcp_raw
- update_response = RESPONSE_PAYLOAD["tcp_raw"]
+ update_response = copy.deepcopy(RESPONSE_PAYLOAD["tcp_raw"])
self._plugin._task.args = {
"state": "merged",
"config": [REQUEST_PAYLOAD["tcp_raw"]],
@@ -338,7 +337,7 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is True
# udp
- update_response = RESPONSE_PAYLOAD["udp"]
+ update_response = copy.deepcopy(RESPONSE_PAYLOAD["udp"])
self._plugin._task.args = {
"state": "merged",
"config": [REQUEST_PAYLOAD["udp"]],
@@ -347,7 +346,7 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is True
# splunktcptoken
- update_response = RESPONSE_PAYLOAD["splunktcptoken"]
+ update_response = copy.deepcopy(RESPONSE_PAYLOAD["splunktcptoken"])
self._plugin._task.args = {
"state": "merged",
"config": [REQUEST_PAYLOAD["splunktcptoken"]],
@@ -356,7 +355,7 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is True
# ssl
- update_response = RESPONSE_PAYLOAD["ssl"]
+ update_response = copy.deepcopy(RESPONSE_PAYLOAD["ssl"])
self._plugin._task.args = {
"state": "merged",
"config": [REQUEST_PAYLOAD["ssl"]],
@@ -366,21 +365,27 @@ class TestSplunkEsDataInputsNetworksRules:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_data_inputs_network_merged_idempotent(self, conn, monkeypatch):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
# patch get operation
- get_response = RESPONSE_PAYLOAD["tcp_cooked"]
-
def get_by_path(self, path):
return get_response
+ def create_update(
+ self,
+ rest_path,
+ data=None,
+ mock=None,
+ mock_data=None,
+ ):
+ return get_response
+
monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
+ monkeypatch.setattr(SplunkRequest, "create_update", create_update)
# tcp_cooked
- get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["tcp_cooked"])
self._plugin._task.args = {
"state": "merged",
"config": [REQUEST_PAYLOAD["tcp_cooked"]],
@@ -389,7 +394,7 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is False
# tcp_raw
- get_response = RESPONSE_PAYLOAD["tcp_raw"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["tcp_raw"])
self._plugin._task.args = {
"state": "merged",
"config": [REQUEST_PAYLOAD["tcp_raw"]],
@@ -398,16 +403,16 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is False
# udp
- get_response = RESPONSE_PAYLOAD["udp"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["udp"])
self._plugin._task.args = {
"state": "merged",
"config": [REQUEST_PAYLOAD["udp"]],
}
     result = self._plugin.run(task_vars=self._task_vars)
assert result["changed"] is False
# splunktcptoken
- get_response = RESPONSE_PAYLOAD["splunktcptoken"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["splunktcptoken"])
self._plugin._task.args = {
"state": "merged",
"config": [REQUEST_PAYLOAD["splunktcptoken"]],
@@ -416,7 +420,7 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is False
# ssl
- get_response = RESPONSE_PAYLOAD["ssl"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["ssl"])
self._plugin._task.args = {
"state": "merged",
"config": [REQUEST_PAYLOAD["ssl"]],
@@ -426,25 +430,27 @@ class TestSplunkEsDataInputsNetworksRules:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_data_inputs_network_replaced(self, conn, monkeypatch):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
-    # patch get operation
-    get_response = RESPONSE_PAYLOAD["tcp_cooked"]
-    # patch update operation
+    # patch delete, get and update operations
update_response = REPLACED_RESPONSE_PAYLOAD["tcp_cooked"]
- get_response = RESPONSE_PAYLOAD["tcp_cooked"]
-
def delete_by_path(
- self, rest_path, data=None, mock=None, mock_data=None
+ self,
+ rest_path,
+ data=None,
+ mock=None,
+ mock_data=None,
):
return {}
def create_update(
- self, rest_path, data=None, mock=None, mock_data=None
+ self,
+ rest_path,
+ data=None,
+ mock=None,
+ mock_data=None,
):
return update_response
@@ -456,8 +462,8 @@ class TestSplunkEsDataInputsNetworksRules:
monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path)
# tcp_cooked
- get_response = RESPONSE_PAYLOAD["tcp_cooked"]
- update_response = REPLACED_RESPONSE_PAYLOAD["tcp_cooked"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["tcp_cooked"])
+ update_response = copy.deepcopy(REPLACED_RESPONSE_PAYLOAD["tcp_cooked"])
self._plugin._task.args = {
"state": "replaced",
"config": [REPLACED_REQUEST_PAYLOAD["tcp_cooked"]],
@@ -466,8 +472,8 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is True
# tcp_raw
- get_response = RESPONSE_PAYLOAD["tcp_raw"]
- update_response = REPLACED_RESPONSE_PAYLOAD["tcp_raw"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["tcp_raw"])
+ update_response = copy.deepcopy(REPLACED_RESPONSE_PAYLOAD["tcp_raw"])
self._plugin._task.args = {
"state": "replaced",
"config": [REPLACED_REQUEST_PAYLOAD["tcp_raw"]],
@@ -476,8 +482,8 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is True
# udp
- get_response = RESPONSE_PAYLOAD["udp"]
- update_response = REPLACED_RESPONSE_PAYLOAD["udp"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["udp"])
+ update_response = copy.deepcopy(REPLACED_RESPONSE_PAYLOAD["udp"])
self._plugin._task.args = {
"state": "replaced",
"config": [REPLACED_REQUEST_PAYLOAD["udp"]],
@@ -486,8 +492,8 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is True
# splunktcptoken
- get_response = RESPONSE_PAYLOAD["splunktcptoken"]
- update_response = REPLACED_RESPONSE_PAYLOAD["splunktcptoken"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["splunktcptoken"])
+ update_response = copy.deepcopy(REPLACED_RESPONSE_PAYLOAD["splunktcptoken"])
self._plugin._task.args = {
"state": "replaced",
"config": [REPLACED_REQUEST_PAYLOAD["splunktcptoken"]],
@@ -497,23 +503,41 @@ class TestSplunkEsDataInputsNetworksRules:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_data_inputs_network_replaced_idempotent(
- self, conn, monkeypatch
+ self,
+ conn,
+ monkeypatch,
):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
-
# patch get operation
- get_response = RESPONSE_PAYLOAD["tcp_cooked"]
def get_by_path(self, path):
return get_response
+ def delete_by_path(
+ self,
+ rest_path,
+ data=None,
+ mock=None,
+ mock_data=None,
+ ):
+ return {}
+
+ def create_update(
+ self,
+ rest_path,
+ data=None,
+ mock=None,
+ mock_data=None,
+ ):
+ return get_response
+
monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
+ monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path)
+ monkeypatch.setattr(SplunkRequest, "create_update", create_update)
# tcp_cooked
- get_response = REPLACED_RESPONSE_PAYLOAD["tcp_cooked"]
+ get_response = copy.deepcopy(REPLACED_RESPONSE_PAYLOAD["tcp_cooked"])
self._plugin._task.args = {
"state": "replaced",
"config": [REPLACED_REQUEST_PAYLOAD["tcp_cooked"]],
@@ -522,7 +546,7 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is False
# tcp_raw
- get_response = REPLACED_RESPONSE_PAYLOAD["tcp_raw"]
+ get_response = copy.deepcopy(REPLACED_RESPONSE_PAYLOAD["tcp_raw"])
self._plugin._task.args = {
"state": "replaced",
"config": [REPLACED_REQUEST_PAYLOAD["tcp_raw"]],
@@ -531,7 +555,7 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is False
# udp
- get_response = REPLACED_RESPONSE_PAYLOAD["udp"]
+ get_response = copy.deepcopy(REPLACED_RESPONSE_PAYLOAD["udp"])
self._plugin._task.args = {
"state": "replaced",
"config": [REPLACED_REQUEST_PAYLOAD["udp"]],
@@ -540,7 +564,7 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is False
# splunktcptoken
- get_response = REPLACED_RESPONSE_PAYLOAD["splunktcptoken"]
+ get_response = copy.deepcopy(REPLACED_RESPONSE_PAYLOAD["splunktcptoken"])
self._plugin._task.args = {
"state": "replaced",
"config": [REPLACED_REQUEST_PAYLOAD["splunktcptoken"]],
@@ -550,17 +574,19 @@ class TestSplunkEsDataInputsNetworksRules:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_data_inputs_network_deleted(self, conn, monkeypatch):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
def delete_by_path(
- self, rest_path, data=None, mock=None, mock_data=None
+ self,
+ rest_path,
+ data=None,
+ mock=None,
+ mock_data=None,
):
return {}
- get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["tcp_cooked"])
def get_by_path(self, path):
return get_response
@@ -569,7 +595,7 @@ class TestSplunkEsDataInputsNetworksRules:
monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
# tcp_cooked
- get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["tcp_cooked"])
self._plugin._task.args = {
"state": "deleted",
"config": [REQUEST_PAYLOAD["tcp_cooked"]],
@@ -578,7 +604,7 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is True
# tcp_raw
- get_response = RESPONSE_PAYLOAD["tcp_raw"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["tcp_raw"])
self._plugin._task.args = {
"state": "deleted",
"config": [REQUEST_PAYLOAD["tcp_raw"]],
@@ -606,11 +632,11 @@ class TestSplunkEsDataInputsNetworksRules:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_data_inputs_network_deleted_idempotent(
- self, conn, monkeypatch
+ self,
+ conn,
+ monkeypatch,
):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
def get_by_path(self, path):
@@ -652,13 +678,11 @@ class TestSplunkEsDataInputsNetworksRules:
@patch("ansible.module_utils.connection.Connection.__rpc__")
def test_es_data_inputs_network_gathered(self, conn, monkeypatch):
- self._plugin._connection.socket_path = (
- tempfile.NamedTemporaryFile().name
- )
+ self._plugin._connection.socket_path = tempfile.NamedTemporaryFile().name
self._plugin._connection._shell = MagicMock()
# patch get operation
- get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["tcp_cooked"])
def get_by_path(self, path):
return get_response
@@ -666,7 +690,7 @@ class TestSplunkEsDataInputsNetworksRules:
monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path)
# tcp_cooked
- get_response = RESPONSE_PAYLOAD["tcp_cooked"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["tcp_cooked"])
self._plugin._task.args = {
"state": "gathered",
"config": [REQUEST_PAYLOAD["tcp_cooked"]],
@@ -675,7 +699,7 @@ class TestSplunkEsDataInputsNetworksRules:
assert result["changed"] is False
# tcp_raw
- get_response = RESPONSE_PAYLOAD["tcp_raw"]
+ get_response = copy.deepcopy(RESPONSE_PAYLOAD["tcp_raw"])
self._plugin._task.args = {
"state": "gathered",
"config": [REQUEST_PAYLOAD["tcp_raw"]],
diff --git a/ansible_collections/splunk/es/tests/unit/plugins/modules/conftest.py b/ansible_collections/splunk/es/tests/unit/plugins/modules/conftest.py
index e19a1e04c..349e71ada 100644
--- a/ansible_collections/splunk/es/tests/unit/plugins/modules/conftest.py
+++ b/ansible_collections/splunk/es/tests/unit/plugins/modules/conftest.py
@@ -2,15 +2,16 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import json
import pytest
-from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes
from ansible.module_utils.common._collections_compat import MutableMapping
+from ansible.module_utils.six import string_types
@pytest.fixture
@@ -21,20 +22,13 @@ def patch_ansible_module(request, mocker):
if "ANSIBLE_MODULE_ARGS" not in request.param:
request.param = {"ANSIBLE_MODULE_ARGS": request.param}
if "_ansible_remote_tmp" not in request.param["ANSIBLE_MODULE_ARGS"]:
- request.param["ANSIBLE_MODULE_ARGS"][
- "_ansible_remote_tmp"
- ] = "/tmp"
- if (
- "_ansible_keep_remote_files"
- not in request.param["ANSIBLE_MODULE_ARGS"]
- ):
- request.param["ANSIBLE_MODULE_ARGS"][
- "_ansible_keep_remote_files"
- ] = False
+ request.param["ANSIBLE_MODULE_ARGS"]["_ansible_remote_tmp"] = "/tmp"
+ if "_ansible_keep_remote_files" not in request.param["ANSIBLE_MODULE_ARGS"]:
+ request.param["ANSIBLE_MODULE_ARGS"]["_ansible_keep_remote_files"] = False
args = json.dumps(request.param)
else:
raise Exception(
- "Malformed data to the patch_ansible_module pytest fixture"
+ "Malformed data to the patch_ansible_module pytest fixture",
)
mocker.patch("ansible.module_utils.basic._ANSIBLE_ARGS", to_bytes(args))
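
The patch_ansible_module fixture above is meant to be driven through indirect parametrization: the test supplies the argument dict, and the fixture serializes it into basic._ANSIBLE_ARGS before the module parses its options. A hedged usage sketch (the argument values are illustrative):

import pytest


@pytest.mark.parametrize(
    "patch_ansible_module",
    [{"ANSIBLE_MODULE_ARGS": {"name": "/var/log", "state": "present"}}],
    indirect=True,
)
def test_module_sees_staged_args(patch_ansible_module):
    # importing and running the module here would read the staged args
    pass
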
diff --git a/ansible_collections/splunk/es/tests/unit/plugins/modules/utils.py b/ansible_collections/splunk/es/tests/unit/plugins/modules/utils.py
index d55afc0b3..923594463 100644
--- a/ansible_collections/splunk/es/tests/unit/plugins/modules/utils.py
+++ b/ansible_collections/splunk/es/tests/unit/plugins/modules/utils.py
@@ -1,13 +1,15 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import json
-from ansible_collections.trendmicro.deepsec.tests.unit.compat import unittest
-from ansible_collections.trendmicro.deepsec.tests.unit.compat.mock import patch
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
+from ansible_collections.splunk.es.tests.unit.compat import unittest
+from ansible_collections.splunk.es.tests.unit.compat.mock import patch
+
def set_module_args(args):
if "_ansible_remote_tmp" not in args:
@@ -41,7 +43,9 @@ def fail_json(*args, **kwargs):
class ModuleTestCase(unittest.TestCase):
def setUp(self):
self.mock_module = patch.multiple(
- basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json
+ basic.AnsibleModule,
+ exit_json=exit_json,
+ fail_json=fail_json,
)
self.mock_module.start()
self.mock_sleep = patch("time.sleep")
diff --git a/ansible_collections/splunk/es/tox.ini b/ansible_collections/splunk/es/tox.ini
index a533ccb30..6ada631cb 100644
--- a/ansible_collections/splunk/es/tox.ini
+++ b/ansible_collections/splunk/es/tox.ini
@@ -4,20 +4,18 @@ envlist = linters
skipsdist = True
[testenv]
-basepython = python3
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
-commands = find {toxinidir} -type f -name "*.py[c|o]" -delete
[testenv:black]
install_command = pip install {opts} {packages}
commands =
- black -v -l79 {toxinidir}
+ black -v {toxinidir}
[testenv:linters]
install_command = pip install {opts} {packages}
commands =
- black -v -l79 --check {toxinidir}
+ black -v --diff --check {toxinidir}
flake8 {posargs}
[testenv:venv]
@@ -27,7 +25,7 @@ commands = {posargs}
# E123, E125 skipped as they are invalid PEP-8.
show-source = True
-ignore = E123,E125,E402,E501,E741,W503
+ignore = E123,E125,E203,E402,E501,E741,F401,F811,F841,W503
max-line-length = 160
builtins = _
exclude = .git,.tox,tests/unit/compat/
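
For reference, the codes added to the flake8 ignore list correspond to patterns the reformatted tests now rely on; a deliberately contrived illustration of what each newly ignored code would flag:

from ansible.module_utils.six import PY2  # F401: import kept only as a version gate

values = [1, 2, 3]
tail = values[1 :]  # E203: whitespace before ':'; ignored because black spaces complex slices


def stub(self, path):
    discarded = path  # F841: local variable assigned but never used
    return {}


def stub(self, path):  # F811: redefinition, as the per-test stub pattern does
    return {}
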