diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-13 12:04:41 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-13 12:04:41 +0000 |
commit | 975f66f2eebe9dadba04f275774d4ab83f74cf25 (patch) | |
tree | 89bd26a93aaae6a25749145b7e4bca4a1e75b2be /ansible_collections/splunk | |
parent | Initial commit. (diff) | |
download | ansible-975f66f2eebe9dadba04f275774d4ab83f74cf25.tar.xz ansible-975f66f2eebe9dadba04f275774d4ab83f74cf25.zip |
Adding upstream version 7.7.0+dfsg.upstream/7.7.0+dfsg
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'ansible_collections/splunk')
133 files changed, 19722 insertions, 0 deletions
diff --git a/ansible_collections/splunk/es/.github/workflows/test.yml b/ansible_collections/splunk/es/.github/workflows/test.yml new file mode 100644 index 000000000..e2a19e00a --- /dev/null +++ b/ansible_collections/splunk/es/.github/workflows/test.yml @@ -0,0 +1,41 @@ +--- +name: Test collection + +concurrency: + group: ${{ github.head_ref }} + cancel-in-progress: true + +on: # yamllint disable-line rule:truthy + pull_request: + branches: [main] + workflow_dispatch: + +jobs: + changelog: + uses: ansible-network/github_actions/.github/workflows/changelog.yml@main + sanity: + uses: ansible-network/github_actions/.github/workflows/sanity.yml@main + unit-galaxy: + uses: ansible-network/github_actions/.github/workflows/unit_galaxy.yml@main + unit-source: + uses: ansible-network/github_actions/.github/workflows/unit_source.yml@main + with: + collection_pre_install: >- + git+https://github.com/ansible-collections/ansible.utils.git + git+https://github.com/ansible-collections/ansible.netcommon.git + all_green: + if: ${{ always() }} + needs: + - changelog + - sanity + - unit-galaxy + - unit-source + runs-on: ubuntu-latest + steps: + - run: >- + python -c "assert set([ + '${{ needs.changelog.result }}', + '${{ needs.sanity.result }}', + '${{ needs.unit-galaxy.result }}', + '${{ needs.unit-source.result }}' + ]) == {'success'}"
\ No newline at end of file diff --git a/ansible_collections/splunk/es/.gitignore b/ansible_collections/splunk/es/.gitignore new file mode 100644 index 000000000..53e44f6d7 --- /dev/null +++ b/ansible_collections/splunk/es/.gitignore @@ -0,0 +1,131 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +tests/output/ diff --git a/ansible_collections/splunk/es/.pre-commit-config.yaml b/ansible_collections/splunk/es/.pre-commit-config.yaml new file mode 100644 index 000000000..a4450aa3c --- /dev/null +++ b/ansible_collections/splunk/es/.pre-commit-config.yaml @@ -0,0 +1,21 @@ +--- +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.2.0 + hooks: + - id: check-merge-conflict + - id: check-symlinks + - id: debug-statements + - id: end-of-file-fixer + - id: no-commit-to-branch + args: [--branch, main] + - id: trailing-whitespace + - repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black + args: [-l, "79"] + - repo: https://github.com/ansible-network/collection_prep + rev: 1.0.0 + hooks: + - id: update-docs diff --git a/ansible_collections/splunk/es/.yamllint b/ansible_collections/splunk/es/.yamllint new file mode 100644 index 000000000..3adaf90cf --- /dev/null +++ b/ansible_collections/splunk/es/.yamllint @@ -0,0 +1,15 @@ +--- +extends: default + +ignore: | + .tox + changelogs/* + +rules: + braces: + max-spaces-inside: 1 + level: error + brackets: + max-spaces-inside: 1 + level: error + line-length: disable diff --git a/ansible_collections/splunk/es/CHANGELOG.rst b/ansible_collections/splunk/es/CHANGELOG.rst new file mode 100644 index 000000000..da4e628d2 --- /dev/null +++ b/ansible_collections/splunk/es/CHANGELOG.rst @@ -0,0 +1,59 @@ +=================================================== +Splunk Enterprise Security Collection Release Notes +=================================================== + +.. 
contents:: Topics + + +v2.1.0 +====== + +Minor Changes +------------- + +- splunk_adaptive_response_notable_events - Manage Adaptive Responses notable events resource module +- splunk_correlation_searches - Splunk Enterprise Security Correlation searches resource module +- splunk_data_inputs_monitor - Splunk Data Inputs of type Monitor resource module +- splunk_data_inputs_network - Manage Splunk Data Inputs of type TCP or UDP resource module + +v2.0.0 +====== + +Major Changes +------------- + +- Minimum required ansible.netcommon version is 2.5.1. +- Updated base plugin references to ansible.netcommon. + +Bugfixes +-------- + +- Fix ansible test sanity failures and fix flake8 issues. + +v1.0.2 +====== + +Release Summary +--------------- + +- Re-releasing the 1.0.2 with updated galaxy file + +v1.0.1 +====== + +Release Summary +--------------- + +- Releasing 1.0.1 with updated changelog. + +v1.0.0 +====== + +New Modules +----------- + +- splunk.es.adaptive_response_notable_event - Manage Splunk Enterprise Security Notable Event Adaptive Responses +- splunk.es.correlation_search - Manage Splunk Enterprise Security Correlation Searches +- splunk.es.correlation_search_info - Manage Splunk Enterprise Security Correlation Searches +- splunk.es.data_input_monitor - Manage Splunk Data Inputs of type Monitor +- splunk.es.data_input_network - Manage Splunk Data Inputs of type TCP or UDP diff --git a/ansible_collections/splunk/es/FILES.json b/ansible_collections/splunk/es/FILES.json new file mode 100644 index 000000000..dee0ba2d3 --- /dev/null +++ b/ansible_collections/splunk/es/FILES.json @@ -0,0 +1,1293 @@ +{ + "files": [ + { + "name": ".", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": ".github", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": ".github/workflows", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": 
".github/workflows/test.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "abbe2f2b782e28e478a011667782bcd93a86c21f1554f5eaa772305af4d37640", + "format": 1 + }, + { + "name": "changelogs", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "changelogs/fragments", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "changelogs/fragments/.keep", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "format": 1 + }, + { + "name": "changelogs/changelog.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ba2ed344c4e522ff07307c59bc83f28297363d0ed60e0a5ff6a5cba44c9a9f85", + "format": 1 + }, + { + "name": "changelogs/config.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a6c37d81485636d11d3658c05ae604ddcee8a2520cf831763b765b511ae5e522", + "format": 1 + }, + { + "name": "docs", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "docs/splunk.es.adaptive_response_notable_event_module.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b5ba38fde8ea6535297b89f3d307c4e6a4947a8e141da20614c68d31968e613f", + "format": 1 + }, + { + "name": "docs/splunk.es.correlation_search_info_module.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e374b7c71d8a2b47033ef37218c3f1e7669239f4ab03ae1cd24d8c39adfcee3c", + "format": 1 + }, + { + "name": "docs/splunk.es.correlation_search_module.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "cc5f185336595c92d966668a1bf632162befa22b7b2875d180d4226d4e45d48d", + "format": 1 + }, + { + "name": "docs/splunk.es.data_input_monitor_module.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "871ee47e0650ef2a8eb79477b490491170cef7d4da3def7465e686e28ccd86a9", + "format": 1 + }, + { + "name": 
"docs/splunk.es.data_input_network_module.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5ab32953ec3c92411bbf989578e5e3898a8fb77a7df9500e90883cdf6b2632a8", + "format": 1 + }, + { + "name": "docs/splunk.es.splunk_adaptive_response_notable_events_module.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2b7867e09c61ef22dc25cbbc86cb4139afb879270fac22f26294111fd2d70773", + "format": 1 + }, + { + "name": "docs/splunk.es.splunk_correlation_searches_module.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5651ffdca7a73dc9d8b19025d652e6c0c15b7a387d91cd3fc7ec3f6106fed7f9", + "format": 1 + }, + { + "name": "docs/splunk.es.splunk_data_inputs_monitor_module.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b10ed82159024825f4f8b4ad09437c675b60f176fd6fb0f7a61390656ca99e5f", + "format": 1 + }, + { + "name": "docs/splunk.es.splunk_data_inputs_network_module.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fd0549229fd8ab0e612c37b66c5c32d5b2783cde25bc7afacec96f275a184d14", + "format": 1 + }, + { + "name": "docs/splunk.es.splunk_httpapi.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b7b00b66d8d113d97580211a4984c28a84031a259ef8649a2fc13d24f7be2adc", + "format": 1 + }, + { + "name": "meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "meta/runtime.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a5ff05bca6bd4c71c1077632fdc7010ef5bab7c015eb99dfdadf5de56e381bfd", + "format": 1 + }, + { + "name": "plugins", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/action", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/action/splunk_adaptive_response_notable_events.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": 
"3ccad3cbf8935c826b189f915203d455e9db1076ae11c96bd44c716e7c3812e8", + "format": 1 + }, + { + "name": "plugins/action/splunk_correlation_searches.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9b50d96a7c6d982a8946939feee7061ed30508ae7fbb87f50eb2d7ad5a57bc8f", + "format": 1 + }, + { + "name": "plugins/action/splunk_data_inputs_monitor.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "cd9cc47a4f0a5acb068e836cc3e8df9787bfd642aa8a3772caae3254b2d0f5bf", + "format": 1 + }, + { + "name": "plugins/action/splunk_data_inputs_network.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "be803b1d01356fa3ff6cf595f7259bdbd7bf3722945d49fd1c729bc7278bdead", + "format": 1 + }, + { + "name": "plugins/httpapi", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/httpapi/splunk.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "63a8b202d37153330a0b403a4c614072d370e0956778d2645e1767df20a92c62", + "format": 1 + }, + { + "name": "plugins/module_utils", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/module_utils/splunk.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "01a916fef4e7984fd44a871a41ef042ffd29095fcdae8ed971ba39073069b344", + "format": 1 + }, + { + "name": "plugins/modules", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/modules/splunk_adaptive_response_notable_event.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a25efb25ab077bd88519ea3426fb8a13515e16036a4073a7fba3b054d6effa56", + "format": 1 + }, + { + "name": "plugins/modules/splunk_correlation_search.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c07af58dae3541b805dede95a25d557085593dc29f897d711498aedd9f284812", + "format": 1 + }, + { + "name": "plugins/modules/splunk_correlation_search_info.py", + 
"ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a1c321bb59558e65920d07b8b5f664e27efcced9fd7e01c45f9a11c43faf8cbe", + "format": 1 + }, + { + "name": "plugins/modules/splunk_data_input_monitor.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6342870ecb8e2edc36d3c15496735de964b74308abdd3835350ff95512676edc", + "format": 1 + }, + { + "name": "plugins/modules/splunk_data_input_network.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "08dc398f64b71cd5d81bb1d7f82db25ed089b297f77c5fe0beb35b648d5c7310", + "format": 1 + }, + { + "name": "plugins/modules/adaptive_response_notable_event.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a25efb25ab077bd88519ea3426fb8a13515e16036a4073a7fba3b054d6effa56", + "format": 1 + }, + { + "name": "plugins/modules/correlation_search.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c07af58dae3541b805dede95a25d557085593dc29f897d711498aedd9f284812", + "format": 1 + }, + { + "name": "plugins/modules/correlation_search_info.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a1c321bb59558e65920d07b8b5f664e27efcced9fd7e01c45f9a11c43faf8cbe", + "format": 1 + }, + { + "name": "plugins/modules/data_input_monitor.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6342870ecb8e2edc36d3c15496735de964b74308abdd3835350ff95512676edc", + "format": 1 + }, + { + "name": "plugins/modules/data_input_network.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "08dc398f64b71cd5d81bb1d7f82db25ed089b297f77c5fe0beb35b648d5c7310", + "format": 1 + }, + { + "name": "plugins/modules/splunk_adaptive_response_notable_events.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "edd1d1cf0096053a34137462bce7b3ece10a0cacb0a88846cf280c74aa1c963a", + "format": 1 + }, + { + "name": "plugins/modules/splunk_correlation_searches.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": 
"c6271e8b5b280de34ec96d54d664fd20fb2bd4ab9a7f44b641ef2712c094628c", + "format": 1 + }, + { + "name": "plugins/modules/splunk_data_inputs_monitor.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "aa782e642c0756b61a703318d60116cb4267a0a37296f5beffe6b275afbac668", + "format": 1 + }, + { + "name": "plugins/modules/splunk_data_inputs_network.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "988f21bf5004878470902c794a652c9c388893617e45859dd126b1cbdba8d70c", + "format": 1 + }, + { + "name": "tests", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/adaptive_response_notable_event", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/adaptive_response_notable_event/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/adaptive_response_notable_event/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "344cff5c902b2c539ab54ef475c026d955a7c71becfeef0123295715118e706b", + "format": 1 + }, + { + "name": "tests/integration/targets/adaptive_response_notable_event/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1", + "format": 1 + }, + { + "name": "tests/integration/targets/correlation_search_info", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/correlation_search_info/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": 
"tests/integration/targets/correlation_search_info/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c6660a367fbbc59393ebba29b9e733a103bf37b58fa37a1e1520039e06b737e8", + "format": 1 + }, + { + "name": "tests/integration/targets/correlation_search_info/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1", + "format": 1 + }, + { + "name": "tests/integration/targets/data_input_monitor", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/data_input_monitor/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/data_input_monitor/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3a7a296c1e614d16fb885bbb21cbf2f4e61e4543e583a2703ec79a679937527b", + "format": 1 + }, + { + "name": "tests/integration/targets/data_input_monitor/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1", + "format": 1 + }, + { + "name": "tests/integration/targets/data_input_network", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/data_input_network/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/data_input_network/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a0a3ac618c8005edf3c8dbda3493d4f7244273866c8d7b32765a4c13f7b09513", + "format": 1 + }, + { + "name": "tests/integration/targets/data_input_network/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1", + "format": 1 + }, + { + "name": 
"tests/integration/targets/splunk_adaptive_response_notable_event", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_event/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_event/tasks/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_event/aliases", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6bb6404fafee8059a78fb870fbbdf81ebc604c86a60e2e8bc834083a422657f1", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c8e82c716422654d049fd043bbf84d624ed532f96741e032f52f14c19e970d3e", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/meta/main.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ec4fa30fc4a7b9e002d1c7b3932286ace72ba36e4f532e2cc79f49d07e0794c3", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + 
"format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "07767c5c9e3656ee8556479d504d1499cc2a7f1da14c54022acbcfdc655c8926", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e7d7f58a1d24f52718c31cc560ba27eaf69da2df9e8b0d26516560b547d1d9da", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "56d91877ced6fa3159f0e3c2ead5bfea8def1503c933cbbbafeb755c6c0bedd7", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "cf632cbb514f70975d14fd05391cc480a392f951cd6a427700f40fe9b3fc41b5", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/_remove_dim_config.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "818a2113dae79493f6f35b99bb494aa2ffed0491a8e72529195d55dd4c40b649", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/deleted.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b4215b589209fb50665478cb33956d81ecdf85525726f8b8ec10d274055b2b53", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/gathered.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "cd10837d721353aedf5eed0f4bd87630ec782a1205dac2f033ccea2bd6beb862", + "format": 1 + }, + { + 
"name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/merged.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "706cb1d57331d919acdbdf6124adedad8bb394d9377423510d483a60b3713fe5", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/replaced.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "aac96a89d717e3c0e686c626deaf73be71f8a4731bd4304e328a1c485f56d242", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/tests/rtt.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c0c83dd1f31499dcd5ff7236f9457bfa0cc614fa62c17a002f6d97970667d6dd", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/vars", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_adaptive_response_notable_events/vars/main.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fe97f0642079f50d8d7b02ddd3e36d6e7b004a642b8215ffde24b0df2c07ed51", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_correlation_searches", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_correlation_searches/tests", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f5fbf065a46cb46b48cc237274cdb1f6e004ec2885f44990a10289cc9fc8329d", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "19fc16bcb198789fbe56f05f5b4b7bb194139b864a521958e5699b11c63e83e4", + "format": 1 + }, + { + 
"name": "tests/integration/targets/splunk_correlation_searches/tests/deleted.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a60dcc8eaab017ddcc9e55ef06804ea804499160cee75ca7e6dbe25c194fc48f", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_correlation_searches/tests/gathered.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6121b05fcafc9f47ead22cc8b5e212a1a821198fefdce786bbde842194d0ebea", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_correlation_searches/tests/merged.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "53fe8e8c6ba2cd4b7ef89d65a9b1f183ad8397fbd3f49260bd5608da896d788c", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9f73da67af2ad998e89ed256e64ccbe1e6b96d1d75a0f0227f5f0ffd9edc2605", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "86b85acedcb5e72aba8ba4005f07e46645f900100e459c27182f1e73341118c4", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c8e82c716422654d049fd043bbf84d624ed532f96741e032f52f14c19e970d3e", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": 
"tests/integration/targets/splunk_data_inputs_monitor/meta/main.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ec4fa30fc4a7b9e002d1c7b3932286ace72ba36e4f532e2cc79f49d07e0794c3", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "07767c5c9e3656ee8556479d504d1499cc2a7f1da14c54022acbcfdc655c8926", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e7d7f58a1d24f52718c31cc560ba27eaf69da2df9e8b0d26516560b547d1d9da", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "56d91877ced6fa3159f0e3c2ead5bfea8def1503c933cbbbafeb755c6c0bedd7", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/tests", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7b72143772c2812141447499965f8a7a6b799dc22ce015ad4f085846df08cd20", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/_remove_dim_config.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "eaae57d96ac6a30535e9d7f14a95a8cdbbdb8810b6aa499f6374401f2c29cb50", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "190a32c828efdc02b4ea7786c6cf95a0b92519c13fb77bae3cc3eb9d8f8e30e7", + 
"format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/gathered.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b06bd768c434a96678643fc831fcf48d740ade9739e19b213702cfbc931c4386", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1eea1a88f4575d29e487e2b6577717d01f1ea001646ee7765a50fbfafaadf461", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2db716888bcb097e33649add10d56ed82e28d58d2baf44d334c788c898c5d6e8", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b4ba5c6add39a3f87a2159f877d7a18ddf0749ab74cc4513efdbe4feaa594ae6", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/vars", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f70db8cb1ea0840ea3693b3c43f76121cd90e903becdeaf54d7c2d9b272c0842", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/defaults", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c8e82c716422654d049fd043bbf84d624ed532f96741e032f52f14c19e970d3e", + "format": 1 + }, + { + "name": 
"tests/integration/targets/splunk_data_inputs_network/meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/meta/main.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ec4fa30fc4a7b9e002d1c7b3932286ace72ba36e4f532e2cc79f49d07e0794c3", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/tasks", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "07767c5c9e3656ee8556479d504d1499cc2a7f1da14c54022acbcfdc655c8926", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e7d7f58a1d24f52718c31cc560ba27eaf69da2df9e8b0d26516560b547d1d9da", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "56d91877ced6fa3159f0e3c2ead5bfea8def1503c933cbbbafeb755c6c0bedd7", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/tests", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a4c9ce1df1a633f9348532c18146b0f28af7a79c9c339abd983db7d95c858952", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a0c4618e61535d79ef8ea6951e8374a91532579411dd3bbc79efd50645be8d53", + "format": 1 + }, + { + "name": 
"tests/integration/targets/splunk_data_inputs_network/tests/deleted.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "8ab841171f1275649e5f0387b3c3612f3a3120e76c4e3976062d043d3305c3d7", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/tests/gathered.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "116734780772a8a3286d670d0bbc0232a3f7871050ecdaacc919071585101ab1", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "80968b09338373bf614e4510b608111cb31c4c51f0b58b9568b8c365895252a5", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2a6151569c6aafb3d8316e6beff968d80d4d9ba1260093fc53b3f53760d0db11", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "38643e8fba135049b1cd332a1bec01f175b5c570aadb713cb844268433450f34", + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/vars", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/integration/targets/splunk_data_inputs_network/vars/main.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "af5764a4d038986e76309b592c51baa99b80ee8d643778899693913705a3efa8", + "format": 1 + }, + { + "name": "tests/integration/network-integration.cfg", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d67b11263a8f50b30bf43c7c2b4bdd8dc4f173f0b5dd22761311360dfbd56a1d", + "format": 1 + }, + { + "name": "tests/integration/target-prefixes.network", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d31a0d2ea7becadb883d33f8189e1cef71c07a907bef52c2437de1348005d004", + "format": 
1 + }, + { + "name": "tests/sanity", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/sanity/ignore-2.10.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "783614c021deecb018573244dc973a566def9cfd8265e17ab934a1ab16b6ff0a", + "format": 1 + }, + { + "name": "tests/sanity/ignore-2.11.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "783614c021deecb018573244dc973a566def9cfd8265e17ab934a1ab16b6ff0a", + "format": 1 + }, + { + "name": "tests/sanity/ignore-2.9.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "08d97dac9d8352b03ec85ec50e608cca29017b7286176a7a73b00f420e6475df", + "format": 1 + }, + { + "name": "tests/unit", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/compat", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/compat/__init__.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "format": 1 + }, + { + "name": "tests/unit/compat/builtins.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ba13a350ade8ef804336f888d5883b8e54f8bddfb9d0fadc10277a8ca6540f4e", + "format": 1 + }, + { + "name": "tests/unit/compat/mock.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "52ecd54195edca933104eb3e937547c7395ff604ada2694a8b184c2c1466dbf1", + "format": 1 + }, + { + "name": "tests/unit/compat/unittest.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3ed698b1faec43d87a2c1ebcb15a2aae48b09ff355bb9a598e5f5a1c928dbb30", + "format": 1 + }, + { + "name": "tests/unit/mock", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/mock/__init__.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": 
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "format": 1 + }, + { + "name": "tests/unit/mock/loader.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "99243cafb4042ee1354d81e3f21647b18bba2b81e1bcd0d77d5487d6069740b9", + "format": 1 + }, + { + "name": "tests/unit/mock/path.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "77760b066115f34f1ecce2387f8194ba254f3dc44ed89f439f3e6adfd258cdf1", + "format": 1 + }, + { + "name": "tests/unit/mock/procenv.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d7f9e134ebd607e1b2910d62cd8997535c8a2cced4473a2bf5cdaae2233e3049", + "format": 1 + }, + { + "name": "tests/unit/mock/vault_helper.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "feae23166b6eb502f7d9b77c314970516c9a99aaad7de01295b4dfdad53c5c09", + "format": 1 + }, + { + "name": "tests/unit/mock/yaml_helper.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "94e2f3c867d2582c9f7a0e99e544718e355025c4a51c9925e70158fa89b3609e", + "format": 1 + }, + { + "name": "tests/unit/modules", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/modules/__init__.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "format": 1 + }, + { + "name": "tests/unit/modules/conftest.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2af4846e50d461a131ad3edfb609fbb39a9eb1796048c62e4ead8234bcf5c6a1", + "format": 1 + }, + { + "name": "tests/unit/modules/utils.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ecb4e4c4a3a490b49d33d043d246bea11580cfe5460e70630a793c2ffd0ff450", + "format": 1 + }, + { + "name": "tests/unit/plugins", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/action", + "ftype": "dir", + "chksum_type": null, + 
"chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/action/__init__.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "format": 1 + }, + { + "name": "tests/unit/plugins/action/test_es_adaptive_response_notable_events.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2ce521e25bf0bb6ebfa6ff498bbcb272a9ab62c2ddd8a79c4ca84e977a93f5c0", + "format": 1 + }, + { + "name": "tests/unit/plugins/action/test_es_correlation_searches.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "711a328eafd5199109f2d7d90f8a7336df70515444e6c552282809895a7777b9", + "format": 1 + }, + { + "name": "tests/unit/plugins/action/test_es_data_inputs_monitors.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "adeada7519c366c59c0fa3af0e1fdbbe1544ba780f9d43d29235364205b6376e", + "format": 1 + }, + { + "name": "tests/unit/plugins/action/test_es_data_inputs_network.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "64822f8ae4580b5e1fd4e8f1dccc201b2400c1bb3241908126f66812197a2a4b", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/__init__.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/conftest.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2af4846e50d461a131ad3edfb609fbb39a9eb1796048c62e4ead8234bcf5c6a1", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/utils.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ecb4e4c4a3a490b49d33d043d246bea11580cfe5460e70630a793c2ffd0ff450", + "format": 1 + }, + { + "name": "tests/unit/__init__.py", + "ftype": "file", + "chksum_type": 
"sha256", + "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "format": 1 + }, + { + "name": "tests/unit/requirements.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "49ba996dc4735c3463e9af561344346dfae14bcc1a68096ce78364b377f0df1f", + "format": 1 + }, + { + "name": "tests/.keep", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "format": 1 + }, + { + "name": ".gitignore", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "0fabb75e1cd5126877f40c30627431348b568ccd5d2df55fcbcfff03fc9d378d", + "format": 1 + }, + { + "name": ".pre-commit-config.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "0ffea7c1ca77ac9b2775cb35aea17410b1d8dc0de785de7830e08c870a4a95fd", + "format": 1 + }, + { + "name": ".yamllint", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "827ef9e031ecdcaf137be239d33ef93fcbbc3611cbb6b30b0e507d0e03373d0e", + "format": 1 + }, + { + "name": "CHANGELOG.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "44b1f26af8e504a426e7f976c7dea43ffb5e1d51329aeb8238b8303a63503128", + "format": 1 + }, + { + "name": "LICENSE", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986", + "format": 1 + }, + { + "name": "README.md", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "8097af964343852b59aedd108bc36f05d7a849da0a618f3d5fd9aa49108df653", + "format": 1 + }, + { + "name": "bindep.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "f20e24c3b24e3738a72623924e20848bb3bab9ea951099b7d2fcce091b9673a8", + "format": 1 + }, + { + "name": "pyproject.toml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d3128f98117f549f979538e147feb1e53fc5bac8b98e22e1a7504767b692f533", + "format": 1 + }, + { + "name": "requirements.txt", + "ftype": 
"file", + "chksum_type": "sha256", + "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "format": 1 + }, + { + "name": "test-requirements.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "eaa5e13a5ebe3344585b2e5ac61a6974a6d5b132f13a815d3a0f68c36ecfe8ad", + "format": 1 + }, + { + "name": "tox.ini", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fa849abc071b00742c00b96e3df480355a2af2c60e4c920f085a9ac5616d8d4b", + "format": 1 + } + ], + "format": 1 +}
\ No newline at end of file diff --git a/ansible_collections/splunk/es/LICENSE b/ansible_collections/splunk/es/LICENSE new file mode 100644 index 000000000..f288702d2 --- /dev/null +++ b/ansible_collections/splunk/es/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. 
You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. 
+ + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. 
+ + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. 
Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. 
+ + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. 
+ + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. 
If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. 
If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. 
+ + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. 
For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<https://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. 
If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<https://www.gnu.org/licenses/why-not-lgpl.html>. diff --git a/ansible_collections/splunk/es/MANIFEST.json b/ansible_collections/splunk/es/MANIFEST.json new file mode 100644 index 000000000..c420430bb --- /dev/null +++ b/ansible_collections/splunk/es/MANIFEST.json @@ -0,0 +1,35 @@ +{ + "collection_info": { + "namespace": "splunk", + "name": "es", + "version": "2.1.0", + "authors": [ + "Ansible Seurity Team (https://github.com/ansible-security)" + ], + "readme": "README.md", + "tags": [ + "security", + "splunk" + ], + "description": "Ansible Security Collection for Splunk Enterprise Security SIEM", + "license": [ + "GPL-3.0-or-later" + ], + "license_file": null, + "dependencies": { + "ansible.netcommon": ">=2.5.1" + }, + "repository": "https://github.com/ansible-collections/splunk.es", + "documentation": "https://github.com/ansible-collections/splunk.es", + "homepage": "https://github.com/ansible-collections/splunk.es", + "issues": "https://github.com/ansible-collections/splunk.es/issues" + }, + "file_manifest_file": { + "name": "FILES.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a812354f9fca21bf13425317ff0e8df3471be2b5c3510e889388fcbaefc924c4", + "format": 1 + }, + "format": 1 +}
\ No newline at end of file diff --git a/ansible_collections/splunk/es/README.md b/ansible_collections/splunk/es/README.md new file mode 100644 index 000000000..cc7c0037a --- /dev/null +++ b/ansible_collections/splunk/es/README.md @@ -0,0 +1,290 @@ +# Splunk Enterprise Security Ansible Collection + +[![CI](https://zuul-ci.org/gated.svg)](https://dashboard.zuul.ansible.com/t/ansible/project/github.com/ansible-collections/splunk.es) <!--[![Codecov](https://img.shields.io/codecov/c/github/ansible-collections/splunk.es)](https://codecov.io/gh/ansible-collections/splunk.es)--> + +This is the [Ansible +Collection](https://docs.ansible.com/ansible/latest/user_guide/collections_using.html) +provided by the [Ansible Security Automation +Team](https://github.com/ansible-security) for automating actions in +[Splunk Enterprise Security SIEM](https://www.splunk.com/en_us/software/enterprise-security.html) + +This Collection is meant for distribution through +[Ansible Galaxy](https://galaxy.ansible.com/) as is available for all +[Ansible](https://github.com/ansible/ansible) users to utilize, contribute to, +and provide feedback about. + +<!--start requires_ansible--> +## Ansible version compatibility + +This collection has been tested against following Ansible versions: **>=2.9.10**. + +Plugins and modules within a collection may be tested with only specific Ansible versions. +A collection may contain metadata that identifies these versions. +PEP440 is the schema used to describe the versions of Ansible. 
+<!--end requires_ansible--> + +## Collection Content + +<!--start collection content--> +### Httpapi plugins +Name | Description +--- | --- +[splunk.es.splunk](https://github.com/ansible-collections/splunk.es/blob/main/docs/splunk.es.splunk_httpapi.rst)|HttpApi Plugin for Splunk + +### Modules +Name | Description +--- | --- +[splunk.es.adaptive_response_notable_event](https://github.com/ansible-collections/splunk.es/blob/main/docs/splunk.es.adaptive_response_notable_event_module.rst)|Manage Splunk Enterprise Security Notable Event Adaptive Responses +[splunk.es.correlation_search](https://github.com/ansible-collections/splunk.es/blob/main/docs/splunk.es.correlation_search_module.rst)|Manage Splunk Enterprise Security Correlation Searches +[splunk.es.correlation_search_info](https://github.com/ansible-collections/splunk.es/blob/main/docs/splunk.es.correlation_search_info_module.rst)|Manage Splunk Enterprise Security Correlation Searches +[splunk.es.data_input_monitor](https://github.com/ansible-collections/splunk.es/blob/main/docs/splunk.es.data_input_monitor_module.rst)|Manage Splunk Data Inputs of type Monitor +[splunk.es.data_input_network](https://github.com/ansible-collections/splunk.es/blob/main/docs/splunk.es.data_input_network_module.rst)|Manage Splunk Data Inputs of type TCP or UDP +[splunk.es.splunk_adaptive_response_notable_events](https://github.com/ansible-collections/splunk.es/blob/main/docs/splunk.es.splunk_adaptive_response_notable_events_module.rst)|Manage Adaptive Responses notable events resource module +[splunk.es.splunk_correlation_searches](https://github.com/ansible-collections/splunk.es/blob/main/docs/splunk.es.splunk_correlation_searches_module.rst)|Splunk Enterprise Security Correlation searches resource module +[splunk.es.splunk_data_inputs_monitor](https://github.com/ansible-collections/splunk.es/blob/main/docs/splunk.es.splunk_data_inputs_monitor_module.rst)|Splunk Data Inputs of type Monitor resource module 
+[splunk.es.splunk_data_inputs_network](https://github.com/ansible-collections/splunk.es/blob/main/docs/splunk.es.splunk_data_inputs_network_module.rst)|Manage Splunk Data Inputs of type TCP or UDP resource module + +<!--end collection content--> + +### Supported connections + +Use splunk modules with the [`httpapi` connection +plugin](https://docs.ansible.com/ansible/latest/plugins/connection/httpapi.html). +Set certain attributes in the inventory as follows: + +Example `inventory.ini`: + +**NOTE:** The passwords should be stored in a secure location or an [Ansible +Vault](https://docs.ansible.com/ansible/latest/user_guide/vault.html) + +**NOTE:** the default port for Splunk's REST API is 8089 + + [splunk] + splunk.example.com + + [splunk:vars] + ansible_network_os=splunk.es.splunk + ansible_user=admin + ansible_httpapi_pass=my_super_secret_admin_password + ansible_httpapi_port=8089 + ansible_httpapi_use_ssl=yes + ansible_httpapi_validate_certs=True + ansible_connection=httpapi + +## Installing this collection + +You can install the splunk collection with the Ansible Galaxy CLI: + + ansible-galaxy collection install splunk.es + +You can also include it in a `requirements.yml` file and install it with `ansible-galaxy collection install -r requirements.yml`, using the format: + +```yaml +--- +collections: + - name: splunk.es +``` + +## Using this collection + +**NOTE**: For Ansible 2.9, you may not see deprecation warnings when you run your playbooks with this collection. Use this documentation to track when a module is deprecated. + +An example for using this collection to manage a log source with [Splunk Enterprise Security SIEM](https://www.splunk.com/en_us/software/enterprise-security.html) is as follows. + +`inventory.ini` (Note the password should be managed by a [Vault](https://docs.ansible.com/ansible/latest/user_guide/vault.html) for a production environment. 
+ +``` +[splunk] +splunk.example.com + +[splunk:vars] +ansible_network_os=splunk.es.splunk +ansible_user=admin +ansible_httpapi_pass=my_super_secret_admin_password +ansible_httpapi_port=8089 +ansible_httpapi_use_ssl=yes +ansible_httpapi_validate_certs=True +ansible_connection=httpapi +``` + +### Using the modules with Fully Qualified Collection Name (FQCN) + +With [Ansible +Collections](https://docs.ansible.com/ansible/latest/dev_guide/developing_collections.html) +there are various ways to utilize them either by calling specific Content from +the Collection, such as a module, by it's Fully Qualified Collection Name (FQCN) +as we'll show in this example or by defining a Collection Search Path as the +examples below will display. + +We recommend the FQCN method but the +shorthand options listed below exist for convenience. + +`splunk_with_collections_fqcn_example.yml` + +``` +--- +- name: demo splunk + hosts: splunk + gather_facts: False + tasks: + - name: test splunk_data_input_monitor + splunk.es.data_input_monitor: + name: "/var/log/demo.log" + state: "present" + recursive: True + - name: test splunk_data_input_network + splunk.es.data_input_network: + name: "9001" + protocol: "tcp" + state: "absent" + - name: test splunk_coorelation_search + splunk.es.correlation_search: + name: "Test Demo Coorelation Search From Playbook" + description: "Test Demo Coorelation Search From Playbook, description." + search: 'source="/var/log/snort.log"' + state: "present" + - name: test splunk_adaptive_response_notable_event + splunk.es.adaptive_response_notable_event: + name: "Demo notable event from playbook" + correlation_search_name: "Test Demo Coorelation Search From Playbook" + description: "Test Demo notable event from playbook, description." 
+ state: "present" + next_steps: + - ping + - nslookup + recommended_actions: + - script +``` + +### Define your collection search path at the Play level + +Below we specify our collection at the Play level which allows us to use the +splunk modules without specifying the need for the FQCN. + +`splunk_with_collections_example.yml` + +``` +--- +- name: demo splunk + hosts: splunk + gather_facts: False + collections: + - splunk.es + tasks: + - name: test splunk_data_input_monitor + data_input_monitor: + name: "/var/log/demo.log" + state: "present" + recursive: True + - name: test splunk_data_input_network + data_input_network: + name: "9001" + protocol: "tcp" + state: "absent" + - name: test splunk_coorelation_search + correlation_search: + name: "Test Demo Coorelation Search From Playbook" + description: "Test Demo Coorelation Search From Playbook, description." + search: 'source="/var/log/snort.log"' + state: "present" + - name: test splunk_adaptive_response_notable_event + adaptive_response_notable_event: + name: "Demo notable event from playbook" + correlation_search_name: "Test Demo Coorelation Search From Playbook" + description: "Test Demo notable event from playbook, description." + state: "present" + next_steps: + - ping + - nslookup + recommended_actions: + - script +``` + +### Define your collection search path at the Block level + +Below we use the [`block`](https://docs.ansible.com/ansible/latest/user_guide/playbooks_blocks.html) +level keyword, we are able to use the splunk modules without the need for the +FQCN. 
+ +`splunk_with_collections_block_example.yml` + +``` +--- +- name: demo splunk + hosts: splunk + gather_facts: False + tasks: + - name: collection namespace block + - name: test splunk_data_input_monitor + data_input_monitor: + name: "/var/log/demo.log" + state: "present" + recursive: True + - name: test splunk_data_input_network + data_input_network: + name: "9001" + protocol: "tcp" + state: "absent" + - name: test splunk_coorelation_search + correlation_search: + name: "Test Demo Coorelation Search From Playbook" + description: "Test Demo Coorelation Search From Playbook, description." + search: 'source="/var/log/snort.log"' + state: "present" + - name: test splunk_adaptive_response_notable_event + adaptive_response_notable_event: + name: "Demo notable event from playbook" + correlation_search_name: "Test Demo Coorelation Search From Playbook" + description: "Test Demo notable event from playbook, description." + state: "present" + next_steps: + - ping + - nslookup + recommended_actions: + - script + collections: + - splunk.es +``` + +## Contributing to this collection + +We welcome community contributions to this collection. If you find problems, please open an issue or create a PR against the [Splunk collection repository](https://github.com/ansible-collections/splunk.es). See [Contributing to Ansible-maintained collections](https://docs.ansible.com/ansible/devel/community/contributing_maintained_collections.html#contributing-maintained-collections) for complete details. + +You can also join us on: + +- IRC - the `#ansible-security` [irc.libera.chat](https://libera.chat/) channel + +See the [Ansible Community Guide](https://docs.ansible.com/ansible/latest/community/index.html) for details on contributing to Ansible. + +### Code of Conduct + +This collection follows the Ansible project's +[Code of Conduct](https://docs.ansible.com/ansible/devel/community/code_of_conduct.html). +Please read and familiarize yourself with this document. 
+ +## Release notes + +Release notes are available [here](https://github.com/ansible-collections/splunk.es/blob/main/changelogs/CHANGELOG.rst). + +## Roadmap + +<!-- Optional. Include the roadmap for this collection, and the proposed release/versioning strategy so users can anticipate the upgrade/update cycle. --> + +## More information + +- [Ansible network resources](https://docs.ansible.com/ansible/latest/network/getting_started/network_resources.html) +- [Ansible Collection overview](https://github.com/ansible-collections/overview) +- [Ansible User guide](https://docs.ansible.com/ansible/latest/user_guide/index.html) +- [Ansible Developer guide](https://docs.ansible.com/ansible/latest/dev_guide/index.html) +- [Ansible Community code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) + +## Licensing + +GNU General Public License v3.0 or later. + +See [LICENSE](https://www.gnu.org/licenses/gpl-3.0.txt) to see the full text. + +## Author Information + +[Ansible Security Automation Team](https://github.com/ansible-security) + diff --git a/ansible_collections/splunk/es/bindep.txt b/ansible_collections/splunk/es/bindep.txt new file mode 100644 index 000000000..1eeaeb4de --- /dev/null +++ b/ansible_collections/splunk/es/bindep.txt @@ -0,0 +1,8 @@ +# This is a cross-platform list tracking distribution packages needed by tests; +# see https://docs.openstack.org/infra/bindep/ for additional information. + +gcc-c++ [doc test platform:rpm] +python3-devel [test platform:rpm] +python3 [test platform:rpm] +libssh-devel [test platform:rpm] +libssh-dev [test platform:dpkg]
\ No newline at end of file diff --git a/ansible_collections/splunk/es/changelogs/changelog.yaml b/ansible_collections/splunk/es/changelogs/changelog.yaml new file mode 100644 index 000000000..4ac8caeb8 --- /dev/null +++ b/ansible_collections/splunk/es/changelogs/changelog.yaml @@ -0,0 +1,69 @@ +ancestor: null +releases: + 1.0.0: + modules: + - description: Manage Splunk Enterprise Security Notable Event Adaptive Responses + name: splunk.es.adaptive_response_notable_event + namespace: '' + - description: Manage Splunk Enterprise Security Correlation Searches + name: splunk.es.correlation_search + namespace: '' + - description: Manage Splunk Enterprise Security Correlation Searches + name: splunk.es.correlation_search_info + namespace: '' + - description: Manage Splunk Data Inputs of type Monitor + name: splunk.es.data_input_monitor + namespace: '' + - description: Manage Splunk Data Inputs of type TCP or UDP + name: splunk.es.data_input_network + namespace: '' + release_date: '2020-06-22' + 1.0.1: + changes: + release_summary: + - Releasing 1.0.1 with updated changelog. + release_date: '2020-08-28' + 1.0.2: + changes: + release_summary: + - Re-releasing 1.0.1 with updated galaxy file. + release_date: '2020-09-1' + 2.0.0: + changes: + bugfixes: + - Fix ansible test sanity failures and fix flake8 issues. + major_changes: + - Minimum required ansible.netcommon version is 2.5.1. + - Updated base plugin references to ansible.netcommon. 
+ fragments: + - fix_sanity_issues.yaml + - netcommon_ref_update.yaml + - update_tests_unit_black_requirements.yaml + release_date: '2022-04-29' + 2.1.0: + changes: + minor_changes: + - Added adaptive_response_notable_events resource module + - Added data_inputs_monitors resource module + - Added data_inputs_networks resource module + - Added correlation_searches resource module + fragments: + - data_inputs_monitors.yaml + - data_inputs_networks.yaml + - fix_adaptive_response_ne_description.yaml + - fix_doc_for_sanity_failures.yaml + - splunk_adaptive_response_notable_events.yaml + modules: + - description: Manage Adaptive Responses notable events resource module + name: splunk_adaptive_response_notable_events + namespace: ansible_collections.splunk.es.plugins.modules + - description: Splunk Enterprise Security Correlation searches resource module + name: splunk_correlation_searches + namespace: ansible_collections.splunk.es.plugins.modules + - description: Splunk Data Inputs of type Monitor resource module + name: splunk_data_inputs_monitor + namespace: ansible_collections.splunk.es.plugins.modules + - description: Manage Splunk Data Inputs of type TCP or UDP resource module + name: splunk_data_inputs_network + namespace: ansible_collections.splunk.es.plugins.modules + release_date: '2022-09-07' diff --git a/ansible_collections/splunk/es/changelogs/config.yaml b/ansible_collections/splunk/es/changelogs/config.yaml new file mode 100644 index 000000000..3988ea9bc --- /dev/null +++ b/ansible_collections/splunk/es/changelogs/config.yaml @@ -0,0 +1,30 @@ +changelog_filename_template: CHANGELOG.rst +changelog_filename_version_depth: 0 +changes_file: changelog.yaml +changes_format: combined +keep_fragments: false +mention_ancestor: true +new_plugins_after_name: removed_features +notesdir: fragments +prelude_section_name: release_summary +prelude_section_title: Release Summary +flatmap: true +sections: +- - major_changes + - Major Changes +- - minor_changes + - Minor 
Changes +- - breaking_changes + - Breaking Changes / Porting Guide +- - deprecated_features + - Deprecated Features +- - removed_features + - Removed Features (previously deprecated) +- - security_fixes + - Security Fixes +- - bugfixes + - Bugfixes +- - known_issues + - Known Issues +title: Splunk Enterprise Security Collection +trivial_section_name: trivial diff --git a/ansible_collections/splunk/es/changelogs/fragments/.keep b/ansible_collections/splunk/es/changelogs/fragments/.keep new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/ansible_collections/splunk/es/changelogs/fragments/.keep diff --git a/ansible_collections/splunk/es/docs/splunk.es.adaptive_response_notable_event_module.rst b/ansible_collections/splunk/es/docs/splunk.es.adaptive_response_notable_event_module.rst new file mode 100644 index 000000000..4f2462652 --- /dev/null +++ b/ansible_collections/splunk/es/docs/splunk.es.adaptive_response_notable_event_module.rst @@ -0,0 +1,390 @@ +.. _splunk.es.adaptive_response_notable_event_module: + + +***************************************** +splunk.es.adaptive_response_notable_event +***************************************** + +**Manage Splunk Enterprise Security Notable Event Adaptive Responses** + + +Version added: 1.0.0 + +.. contents:: + :local: + :depth: 1 + +DEPRECATED +---------- +:Removed in collection release after 2024-09-01 +:Why: Newer and updated modules released with more functionality. +:Alternative: splunk_adaptive_response_notable_events + + + +Synopsis +-------- +- This module allows for creation, deletion, and modification of Splunk Enterprise Security Notable Event Adaptive Responses that are associated with a correlation search + + + + +Parameters +---------- + +.. 
raw:: html + + <table border=0 cellpadding=0 class="documentation-table"> + <tr> + <th colspan="1">Parameter</th> + <th>Choices/<font color="blue">Defaults</font></th> + <th width="100%">Comments</th> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>asset_extraction</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>src</b> ←</div></li> + <li><div style="color: blue"><b>dest</b> ←</div></li> + <li><div style="color: blue"><b>dvc</b> ←</div></li> + <li><div style="color: blue"><b>orig_host</b> ←</div></li> + </ul> + <b>Default:</b><br/><div style="color: blue">["src", "dest", "dvc", "orig_host"]</div> + </td> + <td> + <div>list of assets to extract, select any one or many of the available choices</div> + <div>defaults to all available choices</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>correlation_search_name</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + </td> + <td> + <div>Name of correlation search to associate this notable event adaptive response with</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>default_owner</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Default owner of the notable event, if unset it will default to Splunk System Defaults</div> + </td> 
+ </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>default_status</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>unassigned</li> + <li>new</li> + <li>in progress</li> + <li>pending</li> + <li>resolved</li> + <li>closed</li> + </ul> + </td> + <td> + <div>Default status of the notable event, if unset it will default to Splunk System Defaults</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>description</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + </td> + <td> + <div>Description of the notable event, this will populate the description field for the web console</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>drill_down_earliest_offset</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"$info_min_time$"</div> + </td> + <td> + <div>Set the amount of time before the triggering event to search for related events. For example, 2h. 
Use "$info_min_time$" to set the drill-down time to match the earliest time of the search</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>drill_down_latest_offset</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"$info_max_time$"</div> + </td> + <td> + <div>Set the amount of time after the triggering event to search for related events. For example, 1m. Use "$info_max_time$" to set the drill-down time to match the latest time of the search</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>drill_down_name</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Name for drill down search, Supports variable substitution with fields from the matching event.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>drill_down_search</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Drill down search, Supports variable substitution with fields from the matching event.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>identity_extraction</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + 
<li><div style="color: blue"><b>user</b> ←</div></li> + <li><div style="color: blue"><b>src_user</b> ←</div></li> + </ul> + <b>Default:</b><br/><div style="color: blue">["user", "src_user"]</div> + </td> + <td> + <div>list of identity fields to extract, select any one or many of the available choices</div> + <div>defaults to all available choices</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>investigation_profiles</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Investigation profile to assiciate the notable event with.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>name</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + </td> + <td> + <div>Name of notable event</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>next_steps</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + </td> + <td> + <div>List of adaptive responses that should be run next</div> + <div>Describe next steps and response actions that an analyst could take to address this threat.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>recommended_actions</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> 
+ / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + </td> + <td> + <div>List of adaptive responses that are recommended to be run next</div> + <div>Identifying Recommended Adaptive Responses will highlight those actions for the analyst when looking at the list of response actions available, making it easier to find them among the longer list of available actions.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>security_domain</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>access</li> + <li>endpoint</li> + <li>network</li> + <li><div style="color: blue"><b>threat</b> ←</div></li> + <li>identity</li> + <li>audit</li> + </ul> + </td> + <td> + <div>Splunk Security Domain</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>severity</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>informational</li> + <li>low</li> + <li>medium</li> + <li><div style="color: blue"><b>high</b> ←</div></li> + <li>critical</li> + <li>unknown</li> + </ul> + </td> + <td> + <div>Severity rating</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>state</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>present</li> + <li>absent</li> + </ul> + </td> + 
<td> + <div>Add or remove a data source.</div> + </td> + </tr> + </table> + <br/> + + + + +Examples +-------- + +.. code-block:: yaml + + - name: Example of using splunk.es.adaptive_response_notable_event module + splunk.es.adaptive_response_notable_event: + name: "Example notable event from Ansible" + correlation_search_name: "Example Correlation Search From Ansible" + description: "Example notable event from Ansible, description." + state: "present" + next_steps: + - ping + - nslookup + recommended_actions: + - script + - ansiblesecurityautomation + + + + +Status +------ + + +- This module will be removed in a release after 2024-09-01. *[deprecated]* +- For more information see `DEPRECATED`_. + + +Authors +~~~~~~~ + +- Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> diff --git a/ansible_collections/splunk/es/docs/splunk.es.correlation_search_info_module.rst b/ansible_collections/splunk/es/docs/splunk.es.correlation_search_info_module.rst new file mode 100644 index 000000000..993d65637 --- /dev/null +++ b/ansible_collections/splunk/es/docs/splunk.es.correlation_search_info_module.rst @@ -0,0 +1,81 @@ +.. _splunk.es.correlation_search_info_module: + + +********************************* +splunk.es.correlation_search_info +********************************* + +**Manage Splunk Enterprise Security Correlation Searches** + + +Version added: 1.0.0 + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- This module allows for the query of Splunk Enterprise Security Correlation Searches + + + + +Parameters +---------- + +.. 
raw:: html + + <table border=0 cellpadding=0 class="documentation-table"> + <tr> + <th colspan="1">Parameter</th> + <th>Choices/<font color="blue">Defaults</font></th> + <th width="100%">Comments</th> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>name</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Name of coorelation search</div> + </td> + </tr> + </table> + <br/> + + + + +Examples +-------- + +.. code-block:: yaml + + - name: Example usage of splunk.es.correlation_search_info + splunk.es.correlation_search_info: + name: "Name of correlation search" + register: scorrelation_search_info + + - name: debug display information gathered + debug: + var: scorrelation_search_info + + + + +Status +------ + + +Authors +~~~~~~~ + +- Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> diff --git a/ansible_collections/splunk/es/docs/splunk.es.correlation_search_module.rst b/ansible_collections/splunk/es/docs/splunk.es.correlation_search_module.rst new file mode 100644 index 000000000..bde5ed420 --- /dev/null +++ b/ansible_collections/splunk/es/docs/splunk.es.correlation_search_module.rst @@ -0,0 +1,398 @@ +.. _splunk.es.correlation_search_module: + + +**************************** +splunk.es.correlation_search +**************************** + +**Manage Splunk Enterprise Security Correlation Searches** + + +Version added: 1.0.0 + +.. contents:: + :local: + :depth: 1 + +DEPRECATED +---------- +:Removed in collection release after 2024-09-01 +:Why: Newer and updated modules released with more functionality. +:Alternative: splunk_correlation_searches + + + +Synopsis +-------- +- This module allows for creation, deletion, and modification of Splunk Enterprise Security Correlation Searches + + + + +Parameters +---------- + +.. 
raw:: html + + <table border=0 cellpadding=0 class="documentation-table"> + <tr> + <th colspan="1">Parameter</th> + <th>Choices/<font color="blue">Defaults</font></th> + <th width="100%">Comments</th> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>app</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"SplunkEnterpriseSecuritySuite"</div> + </td> + <td> + <div>Splunk app to associate the correlation seach with</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>cron_schedule</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"*/5 * * * *"</div> + </td> + <td> + <div>Enter a cron-style schedule.</div> + <div>For example <code>'*/5 * * * *'</code> (every 5 minutes) or <code>'0 21 * * *'</code> (every day at 9 PM).</div> + <div>Real-time searches use a default schedule of <code>'*/5 * * * *'</code>.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>description</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + </td> + <td> + <div>Description of the coorelation search, this will populate the description field for the web console</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>name</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div 
style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + </td> + <td> + <div>Name of coorelation search</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>schedule_priority</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>Default</b> ←</div></li> + <li>Higher</li> + <li>Highest</li> + </ul> + </td> + <td> + <div>Raise the scheduling priority of a report. Set to "Higher" to prioritize it above other searches of the same scheduling mode, or "Highest" to prioritize it above other searches regardless of mode. Use with discretion.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>schedule_window</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"0"</div> + </td> + <td> + <div>Let report run at any time within a window that opens at its scheduled run time, to improve efficiency when there are many concurrently scheduled reports. 
The "auto" setting automatically determines the best window width for the report.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>scheduling</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>real-time</b> ←</div></li> + <li>continuous</li> + </ul> + </td> + <td> + <div>Controls the way the scheduler computes the next execution time of a scheduled search.</div> + <div>Learn more: https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>search</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + </td> + <td> + <div>SPL search string</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>state</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>present</li> + <li>absent</li> + <li>enabled</li> + <li>disabled</li> + </ul> + </td> + <td> + <div>Add, remove, enable, or disiable a correlation search.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>suppress_alerts</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this 
option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>no</b> ←</div></li> + <li>yes</li> + </ul> + </td> + <td> + <div>To suppress alerts from this correlation search or not</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>throttle_fields_to_group_by</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Type the fields to consider for matching events for throttling.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>throttle_window_duration</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>How much time to ignore other events that match the field values specified in Fields to group by.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>time_earliest</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"-24h"</div> + </td> + <td> + <div>Earliest time using relative time modifiers.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>time_latest</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: 
blue">"now"</div> + </td> + <td> + <div>Latest time using relative time modifiers.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>trigger_alert_when</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>number of events</b> ←</div></li> + <li>number of results</li> + <li>number of hosts</li> + <li>number of sources</li> + </ul> + </td> + <td> + <div>Raise the scheduling priority of a report. Set to "Higher" to prioritize it above other searches of the same scheduling mode, or "Highest" to prioritize it above other searches regardless of mode. Use with discretion.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>trigger_alert_when_condition</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>greater than</b> ←</div></li> + <li>less than</li> + <li>equal to</li> + <li>not equal to</li> + <li>drops by</li> + <li>rises by</li> + </ul> + </td> + <td> + <div>Conditional to pass to <code>trigger_alert_when</code></div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>trigger_alert_when_value</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"10"</div> + </td> + <td> + <div>Value to pass to <code>trigger_alert_when</code></div> + </td> + </tr> + <tr> + <td 
colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>ui_dispatch_context</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Set an app to use for links such as the drill-down search in a notable event or links in an email adaptive response action. If None, uses the Application Context.</div> + </td> + </tr> + </table> + <br/> + + +Notes +----- + +.. note:: + - The following options are not yet supported: throttle_window_duration, throttle_fields_to_group_by, and adaptive_response_actions + + + + +Examples +-------- + +.. code-block:: yaml + + - name: Example of creating a correlation search with splunk.es.coorelation_search + splunk.es.correlation_search: + name: "Example Coorelation Search From Ansible" + description: "Example Coorelation Search From Ansible, description." + search: 'source="/var/log/snort.log"' + state: "present" + + + + +Status +------ + + +- This module will be removed in a release after 2024-09-01. *[deprecated]* +- For more information see `DEPRECATED`_. + + +Authors +~~~~~~~ + +- Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> diff --git a/ansible_collections/splunk/es/docs/splunk.es.data_input_monitor_module.rst b/ansible_collections/splunk/es/docs/splunk.es.data_input_monitor_module.rst new file mode 100644 index 000000000..e4b7beb00 --- /dev/null +++ b/ansible_collections/splunk/es/docs/splunk.es.data_input_monitor_module.rst @@ -0,0 +1,370 @@ +.. _splunk.es.data_input_monitor_module: + + +**************************** +splunk.es.data_input_monitor +**************************** + +**Manage Splunk Data Inputs of type Monitor** + + +Version added: 1.0.0 + +.. 
contents:: + :local: + :depth: 1 + +DEPRECATED +---------- +:Removed in collection release after 2024-09-01 +:Why: Newer and updated modules released with more functionality. +:Alternative: splunk_data_inputs_monitor + + + +Synopsis +-------- +- This module allows for addition or deletion of File and Directory Monitor Data Inputs in Splunk. + + + + +Parameters +---------- + +.. raw:: html + + <table border=0 cellpadding=0 class="documentation-table"> + <tr> + <th colspan="1">Parameter</th> + <th>Choices/<font color="blue">Defaults</font></th> + <th width="100%">Comments</th> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>blacklist</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>check_index</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>no</b> ←</div></li> + <li>yes</li> + </ul> + </td> + <td> + <div>If set to <code>True</code>, the index value is checked to ensure that it is the name of a valid index.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>check_path</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>no</li> + <li>yes</li> + </ul> + 
</td> + <td> + <div>If set to <code>True</code>, the name value is checked to ensure that it exists.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>crc_salt</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>A string that modifies the file tracking identity for files in this input. The magic value <SOURCE> invokes special behavior (see admin documentation).</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>disabled</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>no</b> ←</div></li> + <li>yes</li> + </ul> + </td> + <td> + <div>Indicates if input monitoring is disabled.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>followTail</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>no</b> ←</div></li> + <li>yes</li> + </ul> + </td> + <td> + <div>If set to <code>True</code>, files that are seen for the first time is read from the end.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>host</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>The value to 
populate in the host field for events from this data input.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>host_regex</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify a regular expression for a file path. If the path for a file matches this regular expression, the captured value is used to populate the host field for events from this data input. The regular expression must have one capture group.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>host_segment</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">integer</span> + </div> + </td> + <td> + </td> + <td> + <div>Use the specified slash-separate segment of the filepath as the host field value.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>ignore_older_than</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify a time value. If the modification time of a file being monitored falls outside of this rolling time window, the file is no longer being monitored.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>index</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Which index events from this input should be stored in. 
Defaults to default.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>name</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + </td> + <td> + <div>The file or directory path to monitor on the system.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>recursive</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>no</b> ←</div></li> + <li>yes</li> + </ul> + </td> + <td> + <div>Setting this to False prevents monitoring of any subdirectories encountered within this data input.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>rename_source</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>The value to populate in the source field for events from this data input. 
The same source should not be used for multiple data inputs.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>sourcetype</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>The value to populate in the sourcetype field for incoming events.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>state</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>present</li> + <li>absent</li> + </ul> + </td> + <td> + <div>Add or remove a data source.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>time_before_close</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">integer</span> + </div> + </td> + <td> + </td> + <td> + <div>When Splunk software reaches the end of a file that is being read, the file is kept open for a minimum of the number of seconds specified in this value. After this period has elapsed, the file is checked again for more data.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>whitelist</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify a regular expression for a file path. 
Only file paths that match this regular expression are indexed.</div> + </td> + </tr> + </table> + <br/> + + + + +Examples +-------- + +.. code-block:: yaml + + - name: Example adding data input monitor with splunk.es.data_input_monitor + splunk.es.data_input_monitor: + name: "/var/log/example.log" + state: "present" + recursive: True + + + + +Status +------ + + +- This module will be removed in a release after 2024-09-01. *[deprecated]* +- For more information see `DEPRECATED`_. + + +Authors +~~~~~~~ + +- Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> diff --git a/ansible_collections/splunk/es/docs/splunk.es.data_input_network_module.rst b/ansible_collections/splunk/es/docs/splunk.es.data_input_network_module.rst new file mode 100644 index 000000000..fb48a05d7 --- /dev/null +++ b/ansible_collections/splunk/es/docs/splunk.es.data_input_network_module.rst @@ -0,0 +1,309 @@ +.. _splunk.es.data_input_network_module: + + +**************************** +splunk.es.data_input_network +**************************** + +**Manage Splunk Data Inputs of type TCP or UDP** + + +Version added: 1.0.0 + +.. contents:: + :local: + :depth: 1 + +DEPRECATED +---------- +:Removed in collection release after 2024-09-01 +:Why: Newer and updated modules released with more functionality. +:Alternative: splunk_data_inputs_network + + + +Synopsis +-------- +- This module allows for addition or deletion of TCP and UDP Data Inputs in Splunk. + + + + +Parameters +---------- + +.. 
raw:: html + + <table border=0 cellpadding=0 class="documentation-table"> + <tr> + <th colspan="1">Parameter</th> + <th>Choices/<font color="blue">Defaults</font></th> + <th width="100%">Comments</th> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>connection_host</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>ip</b> ←</div></li> + <li>dns</li> + <li>none</li> + </ul> + </td> + <td> + <div>Set the host for the remote server that is sending data.</div> + <div><code>ip</code> sets the host to the IP address of the remote server sending data.</div> + <div><code>dns</code> sets the host to the reverse DNS entry for the IP address of the remote server sending data.</div> + <div><code>none</code> leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>datatype</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>cooked</li> + <li><div style="color: blue"><b>raw</b> ←</div></li> + </ul> + </td> + <td> + <div>Forwarders can transmit three types of data: raw, unparsed, or parsed. 
<code>cooked</code> data refers to parsed and unparsed formats.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>host</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Host from which the indexer gets data.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>index</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>default Index to store generated events.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>name</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + </td> + <td> + <div>The input port which receives raw data.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>protocol</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>tcp</li> + <li>udp</li> + </ul> + </td> + <td> + <div>Choose between tcp or udp</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>queue</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: 
purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>parsingQueue</b> ←</div></li> + <li>indexQueue</li> + </ul> + </td> + <td> + <div>Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.</div> + <div>Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more information about props.conf and rules for timestamping and linebreaking, refer to props.conf and the online documentation at "Monitor files and directories with inputs.conf"</div> + <div>Set queue to indexQueue to send your data directly into the index.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>rawTcpDoneTimeout</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">integer</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">10</div> + </td> + <td> + <div>Specifies in seconds the timeout value for adding a Done-key.</div> + <div>If a connection over the port specified by name remains idle after receiving data for specified number of seconds, it adds a Done-key. 
This implies the last event is completely received.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>restrictToHost</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Allows for restricting this input to only accept data from the host specified here.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>source</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Sets the source key/field for events from this input. Defaults to the input file path.</div> + <div>Sets the source key initial value. The key is used during parsing/indexing, in particular to set the source field during indexing. It is also the source field used at search time. As a convenience, the chosen string is prepended with 'source::'.</div> + <div>Note: Overriding the source key is generally not recommended. Typically, the input layer provides a more accurate string to aid in problem analysis and investigation, accurately recording the file from which the data was retrieved. 
Consider use of source types, tagging, and search wildcards before overriding this value.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>sourcetype</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Set the source type for events from this input.</div> + <div>"sourcetype=" is automatically prepended to <string>.</div> + <div>Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>ssl</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>no</li> + <li>yes</li> + </ul> + </td> + <td> + <div>Enable or disble ssl for the data stream</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>state</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>present</b> ←</div></li> + <li>absent</li> + <li>enabled</li> + <li>disable</li> + </ul> + </td> + <td> + <div>Enable, disable, create, or destroy</div> + </td> + </tr> + </table> + <br/> + + + + +Examples +-------- + +.. code-block:: yaml + + - name: Example adding data input network with splunk.es.data_input_network + splunk.es.data_input_network: + name: "8099" + protocol: "tcp" + state: "present" + + + + +Status +------ + + +- This module will be removed in a release after 2024-09-01. 
*[deprecated]* +- For more information see `DEPRECATED`_. + + +Authors +~~~~~~~ + +- Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> diff --git a/ansible_collections/splunk/es/docs/splunk.es.splunk_adaptive_response_notable_events_module.rst b/ansible_collections/splunk/es/docs/splunk.es.splunk_adaptive_response_notable_events_module.rst new file mode 100644 index 000000000..4838de449 --- /dev/null +++ b/ansible_collections/splunk/es/docs/splunk.es.splunk_adaptive_response_notable_events_module.rst @@ -0,0 +1,846 @@ +.. _splunk.es.splunk_adaptive_response_notable_events_module: + + +************************************************* +splunk.es.splunk_adaptive_response_notable_events +************************************************* + +**Manage Adaptive Responses notable events resource module** + + +Version added: 2.1.0 + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- This module allows for creation, deletion, and modification of Splunk Enterprise Security Notable Event Adaptive Responses that are associated with a correlation search +- Tested against Splunk Enterprise Server 8.2.3 + + + + +Parameters +---------- + +.. 
raw:: html + + <table border=0 cellpadding=0 class="documentation-table"> + <tr> + <th colspan="3">Parameter</th> + <th>Choices/<font color="blue">Defaults</font></th> + <th width="100%">Comments</th> + </tr> + <tr> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>config</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=dictionary</span> + </div> + </td> + <td> + </td> + <td> + <div>Configure file and directory monitoring on the system</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>correlation_search_name</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + </td> + <td> + <div>Name of correlation search to associate this notable event adaptive response with</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>default_owner</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Default owner of the notable event, if unset it will default to Splunk System Defaults</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>default_status</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; 
padding: 0"><b>Choices:</b> + <li>unassigned</li> + <li>new</li> + <li>in progress</li> + <li>pending</li> + <li>resolved</li> + <li>closed</li> + </ul> + </td> + <td> + <div>Default status of the notable event, if unset it will default to Splunk System Defaults</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>description</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Description of the notable event, this will populate the description field for the web console</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>drilldown_earliest_offset</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"$info_min_time$"</div> + </td> + <td> + <div>Set the amount of time before the triggering event to search for related events. For example, 2h. Use '$info_min_time$' to set the drill-down time to match the earliest time of the search</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>drilldown_latest_offset</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"$info_max_time$"</div> + </td> + <td> + <div>Set the amount of time after the triggering event to search for related events. For example, 1m. 
Use '$info_max_time$' to set the drill-down time to match the latest time of the search</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>drilldown_name</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Name for drill down search, Supports variable substitution with fields from the matching event.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>drilldown_search</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Drill down search, Supports variable substitution with fields from the matching event.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>extract_artifacts</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">dictionary</span> + </div> + </td> + <td> + </td> + <td> + <div>Assets and identities to be extracted</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>asset</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>src</li> + <li>dest</li> + <li>dvc</li> + 
<li>orig_host</li> + </ul> + </td> + <td> + <div>list of assets to extract, select any one or many of the available choices</div> + <div>defaults to all available choices</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>file</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + </td> + <td> + <div>list of files to extract</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>identity</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>user</li> + <li>src_user</li> + <li>src_user_id</li> + <li>user_id</li> + <li>src_user_role</li> + <li>user_role</li> + <li>vendor_account</li> + </ul> + </td> + <td> + <div>list of identity fields to extract, select any one or many of the available choices</div> + <div>defaults to 'user' and 'src_user'</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>url</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + </td> + <td> + <div>list of URLs to extract</div> + </td> + </tr> + + <tr> + <td 
class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>investigation_profiles</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + </td> + <td> + <div>Investigation profile to associate the notable event with.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>name</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Name of notable event</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>next_steps</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + </td> + <td> + <div>List of adaptive responses that should be run next</div> + <div>Describe next steps and response actions that an analyst could take to address this threat.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>recommended_actions</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + </td> + <td> + <div>List of adaptive responses that are recommended to be run next</div> + <div>Identifying Recommended Adaptive Responses will 
highlight those actions for the analyst when looking at the list of response actions available, making it easier to find them among the longer list of available actions.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>security_domain</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>access</li> + <li>endpoint</li> + <li>network</li> + <li><div style="color: blue"><b>threat</b> ←</div></li> + <li>identity</li> + <li>audit</li> + </ul> + </td> + <td> + <div>Splunk Security Domain</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>severity</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>informational</li> + <li>low</li> + <li>medium</li> + <li><div style="color: blue"><b>high</b> ←</div></li> + <li>critical</li> + <li>unknown</li> + </ul> + </td> + <td> + <div>Severity rating</div> + </td> + </tr> + + <tr> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>running_config</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>The module, by default, will connect to the remote device and retrieve the current running-config to use as a base for comparing against the contents of source. 
There are times when it is not desirable to have the task get the current running-config for every task in a playbook. The <em>running_config</em> argument allows the implementer to pass in the configuration to use as the base config for comparison. This value of this option should be the output received from device by executing command.</div> + </td> + </tr> + <tr> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>state</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>merged</b> ←</div></li> + <li>replaced</li> + <li>deleted</li> + <li>gathered</li> + </ul> + </td> + <td> + <div>The state the configuration should be left in</div> + </td> + </tr> + </table> + <br/> + + + + +Examples +-------- + +.. code-block:: yaml + + # Using gathered + # -------------- + + - name: Gather adaptive response notable events config + splunk.es.splunk_adaptive_response_notable_events: + config: + - correlation_search_name: Ansible Test + - correlation_search_name: Ansible Test 2 + state: gathered + + # RUN output: + # ----------- + + # "gathered": [ + # { + # "correlation_search_name": "Ansible Test", + # "description": "test notable event", + # "drilldown_earliest_offset": "$info_min_time$", + # "drilldown_latest_offset": "$info_max_time$", + # "drilldown_name": "test_drill_name", + # "drilldown_search": "test_drill", + # "extract_artifacts": { + # "asset": [ + # "src", + # "dest", + # "dvc", + # "orig_host" + # ], + # "identity": [ + # "src_user", + # "user", + # "src_user_id", + # "src_user_role", + # "user_id", + # "user_role", + # "vendor_account" + # ] + # }, + # "investigation_profiles": [ + # "test profile 1", + # "test profile 2", + # "test profile 3" + # ], + # "next_steps": [ + # "makestreams", + # "nbtstat", + # 
"nslookup" + # ], + # "name": "ansible_test_notable", + # "recommended_actions": [ + # "email", + # "logevent", + # "makestreams", + # "nbtstat" + # ], + # "security_domain": "threat", + # "severity": "high" + # }, + # { } # there is no configuration associated with "/var" + # ] + + # Using merged + # ------------ + + - name: Example to add config + splunk.es.splunk_adaptive_response_notable_events: + config: + - correlation_search_name: Ansible Test + description: test notable event + drilldown_earliest_offset: $info_min_time$ + drilldown_latest_offset: $info_max_time$ + extract_artifacts: + asset: + - src + - dest + identity: + - src_user + - user + - src_user_id + next_steps: + - makestreams + name: ansible_test_notable + recommended_actions: + - email + - logevent + security_domain: threat + severity: high + state: merged + + # RUN output: + # ----------- + + # "after": [ + # { + # "correlation_search_name": "Ansible Test", + # "description": "test notable event", + # "drilldown_earliest_offset": "$info_min_time$", + # "drilldown_latest_offset": "$info_max_time$", + # "drilldown_name": "test_drill_name", + # "drilldown_search": "test_drill", + # "extract_artifacts": { + # "asset": [ + # "src", + # "dest", + # "dvc", + # "orig_host" + # ], + # "identity": [ + # "src_user", + # "user", + # "src_user_id", + # "src_user_role", + # "user_id", + # "user_role", + # "vendor_account" + # ] + # }, + # "investigation_profiles": [ + # "test profile 1", + # "test profile 2", + # "test profile 3" + # ], + # "next_steps": [ + # "makestreams", + # "nbtstat", + # "nslookup" + # ], + # "name": "ansible_test_notable", + # "recommended_actions": [ + # "email", + # "logevent", + # "makestreams", + # "nbtstat" + # ], + # "security_domain": "threat", + # "severity": "high" + # } + # ], + # "before": [], + + # Using replaced + # -------------- + + - name: Example to Replace the config + splunk.es.splunk_adaptive_response_notable_events: + config: + - correlation_search_name: Ansible 
Test + description: test notable event + drilldown_earliest_offset: $info_min_time$ + drilldown_latest_offset: $info_max_time$ + extract_artifacts: + asset: + - src + - dest + identity: + - src_user + - user + - src_user_id + next_steps: + - makestreams + name: ansible_test_notable + recommended_actions: + - email + - logevent + security_domain: threat + severity: high + state: replaced + + # RUN output: + # ----------- + + # "after": [ + # { + # "correlation_search_name": "Ansible Test", + # "description": "test notable event", + # "drilldown_earliest_offset": "$info_min_time$", + # "drilldown_latest_offset": "$info_max_time$", + # "extract_artifacts": { + # "asset": [ + # "src", + # "dest" + # ], + # "identity": [ + # "src_user", + # "user", + # "src_user_id" + # ] + # }, + # "next_steps": [ + # "makestreams" + # ], + # "name": "ansible_test_notable", + # "recommended_actions": [ + # "email", + # "logevent" + # ], + # "security_domain": "threat", + # "severity": "high" + # } + # ], + # "before": [ + # { + # "correlation_search_name": "Ansible Test", + # "description": "test notable event", + # "drilldown_earliest_offset": "$info_min_time$", + # "drilldown_latest_offset": "$info_max_time$", + # "drilldown_name": "test_drill_name", + # "drilldown_search": "test_drill", + # "extract_artifacts": { + # "asset": [ + # "src", + # "dest", + # "dvc", + # "orig_host" + # ], + # "identity": [ + # "src_user", + # "user", + # "src_user_id", + # "src_user_role", + # "user_id", + # "user_role", + # "vendor_account" + # ] + # }, + # "investigation_profiles": [ + # "test profile 1", + # "test profile 2", + # "test profile 3" + # ], + # "next_steps": [ + # "makestreams", + # "nbtstat", + # "nslookup" + # ], + # "name": "ansible_test_notable", + # "recommended_actions": [ + # "email", + # "logevent", + # "makestreams", + # "nbtstat" + # ], + # "security_domain": "threat", + # "severity": "high" + # } + # ], + + # USING DELETED + # ------------- + + - name: Example to remove the 
config + splunk.es.splunk_adaptive_response_notable_events: + config: + - correlation_search_name: Ansible Test + state: deleted + + # RUN output: + # ----------- + + # "after": [], + # "before": [ + # { + # "correlation_search_name": "Ansible Test", + # "description": "test notable event", + # "drilldown_earliest_offset": "$info_min_time$", + # "drilldown_latest_offset": "$info_max_time$", + # "drilldown_name": "test_drill_name", + # "drilldown_search": "test_drill", + # "extract_artifacts": { + # "asset": [ + # "src", + # "dest", + # "dvc", + # "orig_host" + # ], + # "identity": [ + # "src_user", + # "user", + # "src_user_id", + # "src_user_role", + # "user_id", + # "user_role", + # "vendor_account" + # ] + # }, + # "investigation_profiles": [ + # "test profile 1", + # "test profile 2", + # "test profile 3" + # ], + # "next_steps": [ + # "makestreams", + # "nbtstat", + # "nslookup" + # ], + # "name": "ansible_test_notable", + # "recommended_actions": [ + # "email", + # "logevent", + # "makestreams", + # "nbtstat" + # ], + # "security_domain": "threat", + # "severity": "high" + # } + # ] + + + +Return Values +------------- +Common return values are documented `here <https://docs.ansible.com/ansible/latest/reference_appendices/common_return_values.html#common-return-values>`_, the following are the fields unique to this module: + +.. 
raw:: html + + <table border=0 cellpadding=0 class="documentation-table"> + <tr> + <th colspan="1">Key</th> + <th>Returned</th> + <th width="100%">Description</th> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="return-"></div> + <b>after</b> + <a class="ansibleOptionLink" href="#return-" title="Permalink to this return value"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + </div> + </td> + <td>when changed</td> + <td> + <div>The configuration as structured data after module completion.</div> + <br/> + <div style="font-size: smaller"><b>Sample:</b></div> + <div style="font-size: smaller; color: blue; word-wrap: break-word; word-break: break-all;">The configuration returned will always be in the same format of the parameters above.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="return-"></div> + <b>before</b> + <a class="ansibleOptionLink" href="#return-" title="Permalink to this return value"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + </div> + </td> + <td>always</td> + <td> + <div>The configuration as structured data prior to module invocation.</div> + <br/> + <div style="font-size: smaller"><b>Sample:</b></div> + <div style="font-size: smaller; color: blue; word-wrap: break-word; word-break: break-all;">The configuration returned will always be in the same format of the parameters above.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="return-"></div> + <b>gathered</b> + <a class="ansibleOptionLink" href="#return-" title="Permalink to this return value"></a> + <div style="font-size: small"> + <span style="color: purple">dictionary</span> + </div> + </td> + <td>when state is <em>gathered</em></td> + <td> + <div>Facts about the network resource gathered from the remote device as structured data.</div> + <br/> + <div style="font-size: smaller"><b>Sample:</b></div> + <div style="font-size: 
smaller; color: blue; word-wrap: break-word; word-break: break-all;">This output will always be in the same format as the module argspec.</div> + </td> + </tr> + </table> + <br/><br/> + + +Status +------ + + +Authors +~~~~~~~ + +- Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security> diff --git a/ansible_collections/splunk/es/docs/splunk.es.splunk_correlation_searches_module.rst b/ansible_collections/splunk/es/docs/splunk.es.splunk_correlation_searches_module.rst new file mode 100644 index 000000000..76295b5dd --- /dev/null +++ b/ansible_collections/splunk/es/docs/splunk.es.splunk_correlation_searches_module.rst @@ -0,0 +1,1061 @@ +.. _splunk.es.splunk_correlation_searches_module: + + +************************************* +splunk.es.splunk_correlation_searches +************************************* + +**Splunk Enterprise Security Correlation searches resource module** + + +Version added: 2.1.0 + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- This module allows for creation, deletion, and modification of Splunk Enterprise Security correlation searches +- Tested against Splunk Enterprise Server v8.2.3 with Splunk Enterprise Security v7.0.1 installed on it. + + + + +Parameters +---------- + +.. 
raw:: html + + <table border=0 cellpadding=0 class="documentation-table"> + <tr> + <th colspan="4">Parameter</th> + <th>Choices/<font color="blue">Defaults</font></th> + <th width="100%">Comments</th> + </tr> + <tr> + <td colspan="4"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>config</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=dictionary</span> + </div> + </td> + <td> + </td> + <td> + <div>Configure file and directory monitoring on the system</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>annotations</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">dictionary</span> + </div> + </td> + <td> + </td> + <td> + <div>Add context from industry standard cyber security mappings in Splunk Enterprise Security or custom annotations</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>cis20</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify CIS20 annotations</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>custom</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: 
purple">elements=dictionary</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify custom framework and custom annotations</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td class="elbow-placeholder"></td> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>custom_annotations</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify annotations associated with custom framework</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td class="elbow-placeholder"></td> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>framework</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify annotation framework</div> + </td> + </tr> + + <tr> + <td class="elbow-placeholder"></td> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>kill_chain_phases</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify Kill 10 annotations</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>mitre_attack</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: 
small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify MITRE ATTACK annotations</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td class="elbow-placeholder"></td> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>nist</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify NIST annotations</div> + </td> + </tr> + + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>app</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"SplunkEnterpriseSecuritySuite"</div> + </td> + <td> + <div>Splunk app to associate the correlation seach with</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>cron_schedule</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"*/5 * * * *"</div> + </td> + <td> + <div>Enter a cron-style schedule.</div> + <div>For example <code>'*/5 * * * *'</code> (every 5 minutes) or <code>'0 21 * * *'</code> (every day at 9 PM).</div> + <div>Real-time searches use a default schedule of <code>'*/5 * * * *'</code>.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> 
+ <b>description</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Description of the coorelation search, this will populate the description field for the web console</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>disabled</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>no</b> ←</div></li> + <li>yes</li> + </ul> + </td> + <td> + <div>Disable correlation search</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>name</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + </td> + <td> + <div>Name of correlation search</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>schedule_priority</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>default</b> ←</div></li> + <li>higher</li> + <li>highest</li> + </ul> + </td> + <td> + <div>Raise the scheduling priority of a report. 
Set to "Higher" to prioritize it above other searches of the same scheduling mode, or "Highest" to prioritize it above other searches regardless of mode. Use with discretion.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>schedule_window</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"0"</div> + </td> + <td> + <div>Let report run at any time within a window that opens at its scheduled run time, to improve efficiency when there are many concurrently scheduled reports. The "auto" setting automatically determines the best window width for the report.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>scheduling</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>realtime</b> ←</div></li> + <li>continuous</li> + </ul> + </td> + <td> + <div>Controls the way the scheduler computes the next execution time of a scheduled search.</div> + <div>Learn more: https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>search</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>SPL 
search string</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>suppress_alerts</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>no</b> ←</div></li> + <li>yes</li> + </ul> + </td> + <td> + <div>To suppress alerts from this correlation search or not</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>throttle_fields_to_group_by</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=string</span> + </div> + </td> + <td> + </td> + <td> + <div>Type the fields to consider for matching events for throttling.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>throttle_window_duration</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>How much time to ignore other events that match the field values specified in Fields to group by.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>time_earliest</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"-24h"</div> + </td> 
+ <td> + <div>Earliest time using relative time modifiers.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>time_latest</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"now"</div> + </td> + <td> + <div>Latest time using relative time modifiers.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>trigger_alert</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>once</b> ←</div></li> + <li>for each result</li> + </ul> + </td> + <td> + <div>Notable response actions and risk response actions are always triggered for each result. Choose whether the trigger is activated once or for each result.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>trigger_alert_when</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>number of events</b> ←</div></li> + <li>number of results</li> + <li>number of hosts</li> + <li>number of sources</li> + </ul> + </td> + <td> + <div>Raise the scheduling priority of a report. 
Set to "Higher" to prioritize it above other searches of the same scheduling mode, or "Highest" to prioritize it above other searches regardless of mode. Use with discretion.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>trigger_alert_when_condition</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>greater than</b> ←</div></li> + <li>less than</li> + <li>equal to</li> + <li>not equal to</li> + <li>drops by</li> + <li>rises by</li> + </ul> + </td> + <td> + <div>Conditional to pass to <code>trigger_alert_when</code></div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>trigger_alert_when_value</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"10"</div> + </td> + <td> + <div>Value to pass to <code>trigger_alert_when</code></div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="3"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>ui_dispatch_context</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Set an app to use for links such as the drill-down search in a notable event or links in an email adaptive response action. 
If None, uses the Application Context.</div> + </td> + </tr> + + <tr> + <td colspan="4"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>running_config</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>The module, by default, will connect to the remote device and retrieve the current running-config to use as a base for comparing against the contents of source. There are times when it is not desirable to have the task get the current running-config for every task in a playbook. The <em>running_config</em> argument allows the implementer to pass in the configuration to use as the base config for comparison. This value of this option should be the output received from device by executing command.</div> + </td> + </tr> + <tr> + <td colspan="4"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>state</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>merged</b> ←</div></li> + <li>replaced</li> + <li>deleted</li> + <li>gathered</li> + </ul> + </td> + <td> + <div>The state the configuration should be left in</div> + </td> + </tr> + </table> + <br/> + + + + +Examples +-------- + +.. 
code-block:: yaml + + # Using gathered + # -------------- + + - name: Gather correlation searches config + splunk.es.splunk_correlation_searches: + config: + - name: Ansible Test + - name: Ansible Test 2 + state: gathered + + # RUN output: + # ----------- + + # "gathered": [ + # { + # "annotations": { + # "cis20": [ + # "test1" + # ], + # "custom": [ + # { + # "custom_annotations": [ + # "test5" + # ], + # "framework": "test_framework" + # } + # ], + # "kill_chain_phases": [ + # "test3" + # ], + # "mitre_attack": [ + # "test2" + # ], + # "nist": [ + # "test4" + # ] + # }, + # "app": "DA-ESS-EndpointProtection", + # "cron_schedule": "*/5 * * * *", + # "description": "test description", + # "disabled": false, + # "name": "Ansible Test", + # "schedule_priority": "default", + # "schedule_window": "0", + # "scheduling": "realtime", + # "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent' + # 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai' + # 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio' + # 'n.src" as "src" | where "count">=6', + # "suppress_alerts": false, + # "throttle_fields_to_group_by": [ + # "test_field1" + # ], + # "throttle_window_duration": "5s", + # "time_earliest": "-24h", + # "time_latest": "now", + # "trigger_alert": "once", + # "trigger_alert_when": "number of events", + # "trigger_alert_when_condition": "greater than", + # "trigger_alert_when_value": "10", + # "ui_dispatch_context": "SplunkEnterpriseSecuritySuite" + # } + # ] + + # Using merged + # ------------ + + - name: Merge and create new correlation searches configuration + splunk.es.splunk_correlation_searches: + config: + - name: Ansible Test + disabled: false + description: test description + app: DA-ESS-EndpointProtection + annotations: + cis20: + - test1 + 
mitre_attack: + - test2 + kill_chain_phases: + - test3 + nist: + - test4 + custom: + - framework: test_framework + custom_annotations: + - test5 + ui_dispatch_context: SplunkEnterpriseSecuritySuite + time_earliest: -24h + time_latest: now + cron_schedule: "*/5 * * * *" + scheduling: realtime + schedule_window: "0" + schedule_priority: default + trigger_alert: once + trigger_alert_when: number of events + trigger_alert_when_condition: greater than + trigger_alert_when_value: "10" + throttle_window_duration: 5s + throttle_fields_to_group_by: + - test_field1 + suppress_alerts: False + search: > + '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent' + 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai' + 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio' + 'n.src" as "src" | where "count">=6' + state: merged + + # RUN output: + # ----------- + + # "after": [ + # { + # "annotations": { + # "cis20": [ + # "test1" + # ], + # "custom": [ + # { + # "custom_annotations": [ + # "test5" + # ], + # "framework": "test_framework" + # } + # ], + # "kill_chain_phases": [ + # "test3" + # ], + # "mitre_attack": [ + # "test2" + # ], + # "nist": [ + # "test4" + # ] + # }, + # "app": "DA-ESS-EndpointProtection", + # "cron_schedule": "*/5 * * * *", + # "description": "test description", + # "disabled": false, + # "name": "Ansible Test", + # "schedule_priority": "default", + # "schedule_window": "0", + # "scheduling": "realtime", + # "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent' + # 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai' + # 'led_Authentication" by "Authentication.app","Authentication.src" | rename 
"Authentication.app" as "app","Authenticatio' + # 'n.src" as "src" | where "count">=6', + # "suppress_alerts": false, + # "throttle_fields_to_group_by": [ + # "test_field1" + # ], + # "throttle_window_duration": "5s", + # "time_earliest": "-24h", + # "time_latest": "now", + # "trigger_alert": "once", + # "trigger_alert_when": "number of events", + # "trigger_alert_when_condition": "greater than", + # "trigger_alert_when_value": "10", + # "ui_dispatch_context": "SplunkEnterpriseSecuritySuite" + # }, + # ], + # "before": [], + + # Using replaced + # -------------- + + - name: Replace existing correlation searches configuration + splunk.es.splunk_correlation_searches: + state: replaced + config: + - name: Ansible Test + disabled: false + description: test description + app: SplunkEnterpriseSecuritySuite + annotations: + cis20: + - test1 + - test2 + mitre_attack: + - test3 + - test4 + kill_chain_phases: + - test5 + - test6 + nist: + - test7 + - test8 + custom: + - framework: test_framework2 + custom_annotations: + - test9 + - test10 + ui_dispatch_context: SplunkEnterpriseSecuritySuite + time_earliest: -24h + time_latest: now + cron_schedule: "*/5 * * * *" + scheduling: continuous + schedule_window: auto + schedule_priority: default + trigger_alert: once + trigger_alert_when: number of events + trigger_alert_when_condition: greater than + trigger_alert_when_value: 10 + throttle_window_duration: 5s + throttle_fields_to_group_by: + - test_field1 + - test_field2 + suppress_alerts: True + search: > + '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent' + 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai' + 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio' + 'n.src" as "src" | where "count">=6' + + # RUN output: + # ----------- + + # "after": [ + # { + # 
"annotations": { + # "cis20": [ + # "test1", + # "test2" + # ], + # "custom": [ + # { + # "custom_annotations": [ + # "test9", + # "test10" + # ], + # "framework": "test_framework2" + # } + # ], + # "kill_chain_phases": [ + # "test5", + # "test6" + # ], + # "mitre_attack": [ + # "test3", + # "test4" + # ], + # "nist": [ + # "test7", + # "test8" + # ] + # }, + # "app": "SplunkEnterpriseSecuritySuite", + # "cron_schedule": "*/5 * * * *", + # "description": "test description", + # "disabled": false, + # "name": "Ansible Test", + # "schedule_priority": "default", + # "schedule_window": "auto", + # "scheduling": "continuous", + # "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent' + # 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai' + # 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio' + # 'n.src" as "src" | where "count">=6', + # "suppress_alerts": true, + # "throttle_fields_to_group_by": [ + # "test_field1", + # "test_field2" + # ], + # "throttle_window_duration": "5s", + # "time_earliest": "-24h", + # "time_latest": "now", + # "trigger_alert": "once", + # "trigger_alert_when": "number of events", + # "trigger_alert_when_condition": "greater than", + # "trigger_alert_when_value": "10", + # "ui_dispatch_context": "SplunkEnterpriseSecuritySuite" + # } + # ], + # "before": [ + # { + # "annotations": { + # "cis20": [ + # "test1" + # ], + # "custom": [ + # { + # "custom_annotations": [ + # "test5" + # ], + # "framework": "test_framework" + # } + # ], + # "kill_chain_phases": [ + # "test3" + # ], + # "mitre_attack": [ + # "test2" + # ], + # "nist": [ + # "test4" + # ] + # }, + # "app": "DA-ESS-EndpointProtection", + # "cron_schedule": "*/5 * * * *", + # "description": "test description", + # "disabled": false, + # "name": "Ansible Test", + # 
"schedule_priority": "default", + # "schedule_window": "0", + # "scheduling": "realtime", + # "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent' + # 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai' + # 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio' + # 'n.src" as "src" | where "count">=6', + # "suppress_alerts": false, + # "throttle_fields_to_group_by": [ + # "test_field1" + # ], + # "throttle_window_duration": "5s", + # "time_earliest": "-24h", + # "time_latest": "now", + # "trigger_alert": "once", + # "trigger_alert_when": "number of events", + # "trigger_alert_when_condition": "greater than", + # "trigger_alert_when_value": "10", + # "ui_dispatch_context": "SplunkEnterpriseSecuritySuite" + # } + # ] + + # Using deleted + # ------------- + + - name: Example to delete the corelation search + splunk.es.splunk_correlation_searches: + config: + - name: Ansible Test + state: deleted + + # RUN output: + # ----------- + + # "after": [], + # "before": [ + # { + # "annotations": { + # "cis20": [ + # "test1" + # ], + # "custom": [ + # { + # "custom_annotations": [ + # "test5" + # ], + # "framework": "test_framework" + # } + # ], + # "kill_chain_phases": [ + # "test3" + # ], + # "mitre_attack": [ + # "test2" + # ], + # "nist": [ + # "test4" + # ] + # }, + # "app": "DA-ESS-EndpointProtection", + # "cron_schedule": "*/5 * * * *", + # "description": "test description", + # "disabled": false, + # "name": "Ansible Test", + # "schedule_priority": "default", + # "schedule_window": "0", + # "scheduling": "realtime", + # "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent' + # 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where 
nodename="Authentication.Fai' + # 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio' + # 'n.src" as "src" | where "count">=6', + # "suppress_alerts": false, + # "throttle_fields_to_group_by": [ + # "test_field1" + # ], + # "throttle_window_duration": "5s", + # "time_earliest": "-24h", + # "time_latest": "now", + # "trigger_alert": "once", + # "trigger_alert_when": "number of events", + # "trigger_alert_when_condition": "greater than", + # "trigger_alert_when_value": "10", + # "ui_dispatch_context": "SplunkEnterpriseSecuritySuite" + # }, + # ], + + + +Return Values +------------- +Common return values are documented `here <https://docs.ansible.com/ansible/latest/reference_appendices/common_return_values.html#common-return-values>`_, the following are the fields unique to this module: + +.. raw:: html + + <table border=0 cellpadding=0 class="documentation-table"> + <tr> + <th colspan="1">Key</th> + <th>Returned</th> + <th width="100%">Description</th> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="return-"></div> + <b>after</b> + <a class="ansibleOptionLink" href="#return-" title="Permalink to this return value"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + </div> + </td> + <td>when changed</td> + <td> + <div>The configuration as structured data after module completion.</div> + <br/> + <div style="font-size: smaller"><b>Sample:</b></div> + <div style="font-size: smaller; color: blue; word-wrap: break-word; word-break: break-all;">The configuration returned will always be in the same format of the parameters above.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="return-"></div> + <b>before</b> + <a class="ansibleOptionLink" href="#return-" title="Permalink to this return value"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + </div> + </td> + <td>always</td> + <td> + 
<div>The configuration as structured data prior to module invocation.</div> + <br/> + <div style="font-size: smaller"><b>Sample:</b></div> + <div style="font-size: smaller; color: blue; word-wrap: break-word; word-break: break-all;">The configuration returned will always be in the same format of the parameters above.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="return-"></div> + <b>gathered</b> + <a class="ansibleOptionLink" href="#return-" title="Permalink to this return value"></a> + <div style="font-size: small"> + <span style="color: purple">dictionary</span> + </div> + </td> + <td>when state is <em>gathered</em></td> + <td> + <div>Facts about the network resource gathered from the remote device as structured data.</div> + <br/> + <div style="font-size: smaller"><b>Sample:</b></div> + <div style="font-size: smaller; color: blue; word-wrap: break-word; word-break: break-all;">This output will always be in the same format as the module argspec.</div> + </td> + </tr> + </table> + <br/><br/> + + +Status +------ + + +Authors +~~~~~~~ + +- Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security> diff --git a/ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_monitor_module.rst b/ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_monitor_module.rst new file mode 100644 index 000000000..54cb445ea --- /dev/null +++ b/ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_monitor_module.rst @@ -0,0 +1,603 @@ +.. _splunk.es.splunk_data_inputs_monitor_module: + + +************************************ +splunk.es.splunk_data_inputs_monitor +************************************ + +**Splunk Data Inputs of type Monitor resource module** + + +Version added: 2.1.0 + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Module to add/modify or delete, File and Directory Monitor Data Inputs in Splunk. 
+- Tested against Splunk Enterprise Server 8.2.3 + + + + +Parameters +---------- + +.. raw:: html + + <table border=0 cellpadding=0 class="documentation-table"> + <tr> + <th colspan="2">Parameter</th> + <th>Choices/<font color="blue">Defaults</font></th> + <th width="100%">Comments</th> + </tr> + <tr> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>config</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=dictionary</span> + </div> + </td> + <td> + </td> + <td> + <div>Configure file and directory monitoring on the system</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>blacklist</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>check_index</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>no</li> + <li>yes</li> + </ul> + </td> + <td> + <div>If set to <code>True</code>, the index value is checked to ensure that it is the name of a valid index.</div> + <div>This parameter is not returned back by Splunk while obtaining object information. 
It is therefore left out while performing idempotency checks</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>check_path</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>no</li> + <li>yes</li> + </ul> + </td> + <td> + <div>If set to <code>True</code>, the name value is checked to ensure that it exists.</div> + <div>This parameter is not returned back by Splunk while obtaining object information. It is therefore left out while performing idempotency checks</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>crc_salt</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>A string that modifies the file tracking identity for files in this input. 
The magic value <SOURCE> invokes special behavior (see admin documentation).</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>disabled</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>no</b> ←</div></li> + <li>yes</li> + </ul> + </td> + <td> + <div>Indicates if input monitoring is disabled.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>follow_tail</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>no</li> + <li>yes</li> + </ul> + </td> + <td> + <div>If set to <code>True</code>, files that are seen for the first time is read from the end.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>host</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"$decideOnStartup"</div> + </td> + <td> + <div>The value to populate in the host field for events from this data input.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>host_regex</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: 
purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify a regular expression for a file path. If the path for a file matches this regular expression, the captured value is used to populate the host field for events from this data input. The regular expression must have one capture group.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>host_segment</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">integer</span> + </div> + </td> + <td> + </td> + <td> + <div>Use the specified slash-separate segment of the filepath as the host field value.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>ignore_older_than</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify a time value. If the modification time of a file being monitored falls outside of this rolling time window, the file is no longer being monitored.</div> + <div>This parameter is not returned back by Splunk while obtaining object information. It is therefore left out while performing idempotency checks</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>index</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <b>Default:</b><br/><div style="color: blue">"default"</div> + </td> + <td> + <div>Which index events from this input should be stored in. 
Defaults to default.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>name</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + </td> + <td> + <div>The file or directory path to monitor on the system.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>recursive</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>no</li> + <li>yes</li> + </ul> + </td> + <td> + <div>Setting this to False prevents monitoring of any subdirectories encountered within this data input.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>rename_source</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>The value to populate in the source field for events from this data input. The same source should not be used for multiple data inputs.</div> + <div>This parameter is not returned back by Splunk while obtaining object information. 
It is therefore left out while performing idempotency checks</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>sourcetype</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>The value to populate in the sourcetype field for incoming events.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>time_before_close</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">integer</span> + </div> + </td> + <td> + </td> + <td> + <div>When Splunk software reaches the end of a file that is being read, the file is kept open for a minimum of the number of seconds specified in this value. After this period has elapsed, the file is checked again for more data.</div> + <div>This parameter is not returned back by Splunk while obtaining object information. It is therefore left out while performing idempotency checks</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>whitelist</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specify a regular expression for a file path. 
Only file paths that match this regular expression are indexed.</div> + </td> + </tr> + + <tr> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>running_config</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>The module, by default, will connect to the remote device and retrieve the current running-config to use as a base for comparing against the contents of source. There are times when it is not desirable to have the task get the current running-config for every task in a playbook. The <em>running_config</em> argument allows the implementer to pass in the configuration to use as the base config for comparison. This value of this option should be the output received from device by executing command.</div> + </td> + </tr> + <tr> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>state</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>merged</b> ←</div></li> + <li>replaced</li> + <li>deleted</li> + <li>gathered</li> + </ul> + </td> + <td> + <div>The state the configuration should be left in</div> + </td> + </tr> + </table> + <br/> + + + + +Examples +-------- + +.. 
code-block:: yaml + + # Using gathered + # -------------- + + - name: Gather config for specified Data inputs monitors + splunk.es.splunk_data_inputs_monitor: + config: + - name: "/var/log" + - name: "/var" + state: gathered + + # RUN output: + # ----------- + + # "gathered": [ + # { + # "blacklist": "//var/log/[a-z0-9]/gm", + # "crc_salt": "<SOURCE>", + # "disabled": false, + # "host": "$decideOnStartup", + # "host_regex": "/(test_host)/gm", + # "host_segment": 3, + # "index": "default", + # "name": "/var/log", + # "recursive": true, + # "sourcetype": "test_source", + # "whitelist": "//var/log/[0-9]/gm" + # } + # ] + # + + # Using merged + # ------------ + + - name: Update Data inputs monitors config + splunk.es.splunk_data_inputs_monitor: + config: + - name: "/var/log" + blacklist: "//var/log/[a-z]/gm" + check_index: True + check_path: True + crc_salt: <SOURCE> + rename_source: "test" + whitelist: "//var/log/[0-9]/gm" + state: merged + + # RUN output: + # ----------- + + # "after": [ + # { + # "blacklist": "//var/log/[a-z]/gm", + # "crc_salt": "<SOURCE>", + # "disabled": false, + # "host": "$decideOnStartup", + # "host_regex": "/(test_host)/gm", + # "host_segment": 3, + # "index": "default", + # "name": "/var/log", + # "recursive": true, + # "sourcetype": "test_source", + # "whitelist": "//var/log/[0-9]/gm" + # } + # ], + # "before": [ + # { + # "blacklist": "//var/log/[a-z0-9]/gm", + # "crc_salt": "<SOURCE>", + # "disabled": false, + # "host": "$decideOnStartup", + # "host_regex": "/(test_host)/gm", + # "host_segment": 3, + # "index": "default", + # "name": "/var/log", + # "recursive": true, + # "sourcetype": "test_source", + # "whitelist": "//var/log/[0-9]/gm" + # } + # ], + + # Using replaced + # -------------- + + - name: To Replace Data inputs monitors config + splunk.es.splunk_data_inputs_monitor: + config: + - name: "/var/log" + blacklist: "//var/log/[a-z0-9]/gm" + crc_salt: <SOURCE> + index: default + state: replaced + + # RUN output: + # ----------- + + 
# "after": [ + # { + # "blacklist": "//var/log/[a-z0-9]/gm", + # "crc_salt": "<SOURCE>", + # "disabled": false, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "/var/log" + # } + # ], + # "before": [ + # { + # "blacklist": "//var/log/[a-z0-9]/gm", + # "crc_salt": "<SOURCE>", + # "disabled": false, + # "host": "$decideOnStartup", + # "host_regex": "/(test_host)/gm", + # "host_segment": 3, + # "index": "default", + # "name": "/var/log", + # "recursive": true, + # "sourcetype": "test_source", + # "whitelist": "//var/log/[0-9]/gm" + # } + # ], + + # Using deleted + # ----------- + - name: To Delete Data inpur monitor config + splunk.es.splunk_data_inputs_monitor: + config: + - name: "/var/log" + state: deleted + + # RUN output: + # ----------- + # + # "after": [], + # "before": [ + # { + # "blacklist": "//var/log/[a-z0-9]/gm", + # "crc_salt": "<SOURCE>", + # "disabled": false, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "/var/log" + # } + # ], + + + +Return Values +------------- +Common return values are documented `here <https://docs.ansible.com/ansible/latest/reference_appendices/common_return_values.html#common-return-values>`_, the following are the fields unique to this module: + +.. 
raw:: html + + <table border=0 cellpadding=0 class="documentation-table"> + <tr> + <th colspan="1">Key</th> + <th>Returned</th> + <th width="100%">Description</th> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="return-"></div> + <b>after</b> + <a class="ansibleOptionLink" href="#return-" title="Permalink to this return value"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + </div> + </td> + <td>when changed</td> + <td> + <div>The configuration as structured data after module completion.</div> + <br/> + <div style="font-size: smaller"><b>Sample:</b></div> + <div style="font-size: smaller; color: blue; word-wrap: break-word; word-break: break-all;">The configuration returned will always be in the same format of the parameters above.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="return-"></div> + <b>before</b> + <a class="ansibleOptionLink" href="#return-" title="Permalink to this return value"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + </div> + </td> + <td>always</td> + <td> + <div>The configuration as structured data prior to module invocation.</div> + <br/> + <div style="font-size: smaller"><b>Sample:</b></div> + <div style="font-size: smaller; color: blue; word-wrap: break-word; word-break: break-all;">The configuration returned will always be in the same format of the parameters above.</div> + </td> + </tr> + </table> + <br/><br/> + + +Status +------ + + +Authors +~~~~~~~ + +- Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security> diff --git a/ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_network_module.rst b/ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_network_module.rst new file mode 100644 index 000000000..aa561b1f0 --- /dev/null +++ b/ansible_collections/splunk/es/docs/splunk.es.splunk_data_inputs_network_module.rst @@ -0,0 +1,965 @@ +.. 
_splunk.es.splunk_data_inputs_network_module: + + +************************************ +splunk.es.splunk_data_inputs_network +************************************ + +**Manage Splunk Data Inputs of type TCP or UDP resource module** + + +Version added: 2.1.0 + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Module that allows to add/update or delete of TCP and UDP Data Inputs in Splunk. + + + + +Parameters +---------- + +.. raw:: html + + <table border=0 cellpadding=0 class="documentation-table"> + <tr> + <th colspan="2">Parameter</th> + <th>Choices/<font color="blue">Defaults</font></th> + <th width="100%">Comments</th> + </tr> + <tr> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>config</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + / <span style="color: purple">elements=dictionary</span> + </div> + </td> + <td> + </td> + <td> + <div>Manage and preview protocol input data.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>cipher_suite</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Specifies list of acceptable ciphers to use in ssl.</div> + <div>Only obtained for TCP SSL configuration present on device.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>connection_host</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>ip</li> + <li>dns</li> 
+ <li>none</li> + </ul> + </td> + <td> + <div>Set the host for the remote server that is sending data.</div> + <div><code>ip</code> sets the host to the IP address of the remote server sending data.</div> + <div><code>dns</code> sets the host to the reverse DNS entry for the IP address of the remote server sending data.</div> + <div><code>none</code> leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>datatype</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>cooked</li> + <li>raw</li> + <li>splunktcptoken</li> + <li>ssl</li> + </ul> + </td> + <td> + <div><code>cooked</code> lets one access cooked TCP input information and create new containers for managing cooked data.</div> + <div><code>raw</code> lets one manage raw tcp inputs from forwarders.</div> + <div><code>splunktcptoken</code> lets one manage receiver access using tokens.</div> + <div><code>ssl</code> Provides access to the SSL configuration of a Splunk server. 
This option does not support states <em>deleted</em> and <em>replaced</em>.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>disabled</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>no</li> + <li>yes</li> + </ul> + </td> + <td> + <div>Indicates whether the input is disabled.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>host</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Host from which the indexer gets data.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>index</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>default Index to store generated events.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>name</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + </td> + <td> + <div>The input port which receives raw data.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + 
<b>no_appending_timestamp</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>no</li> + <li>yes</li> + </ul> + </td> + <td> + <div>If set to true, prevents Splunk software from prepending a timestamp and hostname to incoming events.</div> + <div>Only for UDP data input configuration.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>no_priority_stripping</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>no</li> + <li>yes</li> + </ul> + </td> + <td> + <div>If set to true, Splunk software does not remove the priority field from incoming syslog events.</div> + <div>Only for UDP data input configuration.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>password</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Server certificate password, if any.</div> + <div>Only for TCP SSL configuration.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>protocol</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + / <span style="color: red">required</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 
0"><b>Choices:</b> + <li>tcp</li> + <li>udp</li> + </ul> + </td> + <td> + <div>Choose whether to manage TCP or UDP inputs</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>queue</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li>parsingQueue</li> + <li>indexQueue</li> + </ul> + </td> + <td> + <div>Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.</div> + <div>Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more information about props.conf and rules for timestamping and linebreaking, refer to props.conf and the online documentation at "Monitor files and directories with inputs.conf"</div> + <div>Set queue to indexQueue to send your data directly into the index.</div> + <div>Only applicable for "/tcp/raw" and "/udp" APIs</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>raw_tcp_done_timeout</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">integer</span> + </div> + </td> + <td> + </td> + <td> + <div>Specifies in seconds the timeout value for adding a Done-key.</div> + <div>If a connection over the port specified by name remains idle after receiving data for specified number of seconds, it adds a Done-key. 
This implies the last event is completely received.</div> + <div>Only for TCP raw input configuration.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>require_client_cert</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Determines whether a client must authenticate.</div> + <div>Only for TCP SSL configuration.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>restrict_to_host</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Allows for restricting this input to only accept data from the host specified here.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>root_ca</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Certificate authority list (root file).</div> + <div>Only for TCP SSL configuration.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>server_cert</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Full path to the server certificate.</div> + <div>Only for TCP SSL configuration.</div> + </td> + </tr> + <tr> + <td 
class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>source</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Sets the source key/field for events from this input. Defaults to the input file path.</div> + <div>Sets the source key initial value. The key is used during parsing/indexing, in particular to set the source field during indexing. It is also the source field used at search time. As a convenience, the chosen string is prepended with 'source::'.</div> + <div>Note that Overriding the source key is generally not recommended. Typically, the input layer provides a more accurate string to aid in problem analysis and investigation, accurately recording the file from which the data was retrieved. Consider use of source types, tagging, and search wildcards before overriding this value.</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>sourcetype</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Set the source type for events from this input.</div> + <div>"sourcetype=" is automatically prepended to <string>.</div> + <div>Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>ssl</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">boolean</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 
0"><b>Choices:</b> + <li>no</li> + <li>yes</li> + </ul> + </td> + <td> + <div>Enable or disble ssl for the data stream</div> + </td> + </tr> + <tr> + <td class="elbow-placeholder"></td> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>token</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>Token value to use for SplunkTcpToken. If unspecified, a token is generated automatically.</div> + </td> + </tr> + + <tr> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>running_config</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + </td> + <td> + <div>The module, by default, will connect to the remote device and retrieve the current running-config to use as a base for comparing against the contents of source. There are times when it is not desirable to have the task get the current running-config for every task in a playbook. The <em>running_config</em> argument allows the implementer to pass in the configuration to use as the base config for comparison. 
This value of this option should be the output received from device by executing command.</div> + </td> + </tr> + <tr> + <td colspan="2"> + <div class="ansibleOptionAnchor" id="parameter-"></div> + <b>state</b> + <a class="ansibleOptionLink" href="#parameter-" title="Permalink to this option"></a> + <div style="font-size: small"> + <span style="color: purple">string</span> + </div> + </td> + <td> + <ul style="margin: 0; padding: 0"><b>Choices:</b> + <li><div style="color: blue"><b>merged</b> ←</div></li> + <li>replaced</li> + <li>deleted</li> + <li>gathered</li> + </ul> + </td> + <td> + <div>The state the configuration should be left in</div> + </td> + </tr> + </table> + <br/> + + + + +Examples +-------- + +.. code-block:: yaml + + # Using gathered + # -------------- + + - name: Gathering information about TCP Cooked Inputs + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: cooked + state: gathered + + # RUN output: + # ----------- + + # "gathered": [ + # { + # "connection_host": "ip", + # "disabled": true, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "8101" + # }, + # { + # "disabled": false, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "9997" + # }, + # { + # "connection_host": "ip", + # "disabled": true, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "default:8101", + # "restrict_to_host": "default" + # } + # ] + + + - name: Gathering information about TCP Cooked Inputs by Name + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: cooked + name: 9997 + state: gathered + + # RUN output: + # ----------- + + # "gathered": [ + # { + # "datatype": "cooked", + # "disabled": false, + # "host": "$decideOnStartup", + # "name": "9997", + # "protocol": "tcp" + # } + # ] + + + - name: Gathering information about TCP Raw Inputs + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: raw + state: gathered + + # RUN output: + # 
----------- + + # "gathered": [ + # { + # "connection_host": "ip", + # "disabled": false, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "8099", + # "queue": "parsingQueue", + # "raw_tcp_done_timeout": 10 + # }, + # { + # "connection_host": "ip", + # "disabled": true, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "default:8100", + # "queue": "parsingQueue", + # "raw_tcp_done_timeout": 10, + # "restrict_to_host": "default", + # "source": "test_source", + # "sourcetype": "test_source_type" + # } + # ] + + - name: Gathering information about TCP Raw inputs by Name + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: raw + name: 8099 + state: gathered + + # RUN output: + # ----------- + + # "gathered": [ + # { + # "connection_host": "ip", + # "datatype": "raw", + # "disabled": false, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "8099", + # "protocol": "tcp", + # "queue": "parsingQueue", + # "raw_tcp_done_timeout": 10 + # } + # ] + + - name: Gathering information about TCP SSL configuration + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: ssl + state: gathered + + # RUN output: + # ----------- + + # "gathered": [ + # { + # "cipher_suite": <cipher-suites>, + # "disabled": true, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "test_host" + # } + # ] + + - name: Gathering information about TCP SplunkTcpTokens + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: splunktcptoken + state: gathered + + # RUN output: + # ----------- + + # "gathered": [ + # { + # "disabled": false, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "splunktcptoken://test_token1", + # "token": <token1> + # }, + # { + # "disabled": false, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "splunktcptoken://test_token2", + # "token": <token2> + # } + # ] + + # Using merged + # ------------ + + - 
name: To add the TCP raw config + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: raw + name: 8100 + connection_host: ip + disabled: True + raw_tcp_done_timeout: 9 + restrict_to_host: default + queue: parsingQueue + source: test_source + sourcetype: test_source_type + state: merged + + # RUN output: + # ----------- + + # "after": [ + # { + # "connection_host": "ip", + # "datatype": "raw", + # "disabled": true, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "default:8100", + # "protocol": "tcp", + # "queue": "parsingQueue", + # "raw_tcp_done_timeout": 9, + # "restrict_to_host": "default", + # "source": "test_source", + # "sourcetype": "test_source_type" + # } + # ], + # "before": [ + # { + # "connection_host": "ip", + # "datatype": "raw", + # "disabled": true, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "default:8100", + # "protocol": "tcp", + # "queue": "parsingQueue", + # "raw_tcp_done_timeout": 10, + # "restrict_to_host": "default", + # "source": "test_source", + # "sourcetype": "test_source_type" + # } + # ] + + - name: To add the TCP cooked config + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: cooked + name: 8101 + connection_host: ip + disabled: False + restrict_to_host: default + state: merged + + # RUN output: + # ----------- + + # "after": [ + # { + # "connection_host": "ip", + # "datatype": "cooked", + # "disabled": false, + # "host": "$decideOnStartup", + # "name": "default:8101", + # "protocol": "tcp", + # "restrict_to_host": "default" + # } + # ], + # "before": [ + # { + # "connection_host": "ip", + # "datatype": "cooked", + # "disabled": true, + # "host": "$decideOnStartup", + # "name": "default:8101", + # "protocol": "tcp", + # "restrict_to_host": "default" + # } + # ], + + - name: To add the Splunk TCP token + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: splunktcptoken + name: test_token + state: merged + + # 
RUN output: + # ----------- + + # "after": [ + # { + # "datatype": "splunktcptoken", + # "name": "splunktcptoken://test_token", + # "protocol": "tcp", + # "token": <token> + # } + # ], + # "before": [], + + - name: To add the Splunk SSL + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: ssl + name: test_host + root_ca: {root CA directory} + server_cert: {server cretificate directory} + state: merged + + # RUN output: + # ----------- + + # "after": [ + # { + # "cipher_suite": <cipher suite>, + # "datatype": "ssl", + # "disabled": true, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "test_host", + # "protocol": "tcp" + # } + # ], + # "before": [] + + + # Using deleted + # ------------- + + - name: To Delete TCP Raw + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: raw + name: default:8100 + state: deleted + + # RUN output: + # ----------- + + # "after": [], + # "before": [ + # { + # "connection_host": "ip", + # "datatype": "raw", + # "disabled": true, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "default:8100", + # "protocol": "tcp", + # "queue": "parsingQueue", + # "raw_tcp_done_timeout": 9, + # "restrict_to_host": "default", + # "source": "test_source", + # "sourcetype": "test_source_type" + # } + # ] + + # Using replaced + # -------------- + + - name: Replace existing data inputs networks configuration + register: result + splunk.es.splunk_data_inputs_network: + state: replaced + config: + - protocol: tcp + datatype: raw + name: 8100 + connection_host: ip + disabled: True + host: "$decideOnStartup" + index: default + queue: parsingQueue + raw_tcp_done_timeout: 10 + restrict_to_host: default + source: test_source + sourcetype: test_source_type + + # RUN output: + # ----------- + + # "after": [ + # { + # "connection_host": "ip", + # "datatype": "raw", + # "disabled": true, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "default:8100", + # 
"protocol": "tcp", + # "queue": "parsingQueue", + # "raw_tcp_done_timeout": 9, + # "restrict_to_host": "default", + # "source": "test_source", + # "sourcetype": "test_source_type" + # } + # ], + # "before": [ + # { + # "connection_host": "ip", + # "datatype": "raw", + # "disabled": true, + # "host": "$decideOnStartup", + # "index": "default", + # "name": "default:8100", + # "protocol": "tcp", + # "queue": "parsingQueue", + # "raw_tcp_done_timeout": 10, + # "restrict_to_host": "default", + # "source": "test_source", + # "sourcetype": "test_source_type" + # } + # ], + + + +Return Values +------------- +Common return values are documented `here <https://docs.ansible.com/ansible/latest/reference_appendices/common_return_values.html#common-return-values>`_, the following are the fields unique to this module: + +.. raw:: html + + <table border=0 cellpadding=0 class="documentation-table"> + <tr> + <th colspan="1">Key</th> + <th>Returned</th> + <th width="100%">Description</th> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="return-"></div> + <b>after</b> + <a class="ansibleOptionLink" href="#return-" title="Permalink to this return value"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + </div> + </td> + <td>when changed</td> + <td> + <div>The resulting configuration after module execution.</div> + <br/> + <div style="font-size: smaller"><b>Sample:</b></div> + <div style="font-size: smaller; color: blue; word-wrap: break-word; word-break: break-all;">This output will always be in the same format as the module argspec.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="return-"></div> + <b>before</b> + <a class="ansibleOptionLink" href="#return-" title="Permalink to this return value"></a> + <div style="font-size: small"> + <span style="color: purple">list</span> + </div> + </td> + <td>when state is <em>merged</em>, <em>replaced</em>, <em>deleted</em></td> + <td> + <div>The 
configuration prior to the module execution.</div> + <br/> + <div style="font-size: smaller"><b>Sample:</b></div> + <div style="font-size: smaller; color: blue; word-wrap: break-word; word-break: break-all;">This output will always be in the same format as the module argspec.</div> + </td> + </tr> + <tr> + <td colspan="1"> + <div class="ansibleOptionAnchor" id="return-"></div> + <b>gathered</b> + <a class="ansibleOptionLink" href="#return-" title="Permalink to this return value"></a> + <div style="font-size: small"> + <span style="color: purple">dictionary</span> + </div> + </td> + <td>when state is <em>gathered</em></td> + <td> + <div>Facts about the network resource gathered from the remote device as structured data.</div> + <br/> + <div style="font-size: smaller"><b>Sample:</b></div> + <div style="font-size: smaller; color: blue; word-wrap: break-word; word-break: break-all;">This output will always be in the same format as the module argspec.</div> + </td> + </tr> + </table> + <br/><br/> + + +Status +------ + + +Authors +~~~~~~~ + +- Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security> diff --git a/ansible_collections/splunk/es/docs/splunk.es.splunk_httpapi.rst b/ansible_collections/splunk/es/docs/splunk.es.splunk_httpapi.rst new file mode 100644 index 000000000..d2c82039e --- /dev/null +++ b/ansible_collections/splunk/es/docs/splunk.es.splunk_httpapi.rst @@ -0,0 +1,43 @@ +.. _splunk.es.splunk_httpapi: + + +**************** +splunk.es.splunk +**************** + +**HttpApi Plugin for Splunk** + + +Version added: 1.0.0 + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- This HttpApi plugin provides methods to connect to Splunk over a HTTP(S)-based api. + + + + + + + + + + + +Status +------ + + +Authors +~~~~~~~ + +- Ansible Security Team (@ansible-security) + + +.. hint:: + Configuration entries for each entry type have a low to high priority order. 
For example, a variable that is lower in the list will override a variable that is higher up. diff --git a/ansible_collections/splunk/es/meta/runtime.yml b/ansible_collections/splunk/es/meta/runtime.yml new file mode 100644 index 000000000..468a3d807 --- /dev/null +++ b/ansible_collections/splunk/es/meta/runtime.yml @@ -0,0 +1,49 @@ +--- +plugin_routing: + action: + correlation_search: + redirect: splunk.es.splunk + correlation_searches: + redirect: splunk.es.splunk + data_input_monitor: + redirect: splunk.es.splunk + data_inputs_monitor: + redirect: splunk.es.splunk + data_input_network: + redirect: splunk.es.splunk + data_inputs_network: + redirect: splunk.es.splunk + adaptive_response_notable_event: + redirect: splunk.es.splunk + adaptive_response_notable_events: + redirect: splunk.es.splunk + modules: + correlation_search: + deprecation: + removal_date: "2024-09-01" + warning_text: See the plugin documentation for more details + redirect: splunk.es.splunk_correlation_search + correlation_searches: + redirect: splunk.es.splunk_correlation_searches + data_input_monitor: + deprecation: + removal_date: "2024-09-01" + warning_text: See the plugin documentation for more details + redirect: splunk.es.splunk_data_input_monitor + data_inputs_monitor: + redirect: splunk.es.splunk_data_inputs_monitor + data_input_network: + deprecation: + removal_date: "2024-09-01" + warning_text: See the plugin documentation for more details + redirect: splunk.es.splunk_data_input_network + data_inputs_network: + redirect: splunk.es.splunk_data_inputs_network + adaptive_response_notable_event: + deprecation: + removal_date: "2024-09-01" + warning_text: See the plugin documentation for more details + redirect: splunk.es.splunk_adaptive_response_notable_event + adaptive_response_notable_events: + redirect: splunk.es.splunk_adaptive_response_notable_events +requires_ansible: ">=2.9.10" diff --git a/ansible_collections/splunk/es/plugins/action/splunk_adaptive_response_notable_events.py 
b/ansible_collections/splunk/es/plugins/action/splunk_adaptive_response_notable_events.py new file mode 100644 index 000000000..a95e4b3ed --- /dev/null +++ b/ansible_collections/splunk/es/plugins/action/splunk_adaptive_response_notable_events.py @@ -0,0 +1,529 @@ +# +# Copyright 2022 Red Hat Inc. +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. +# + +""" +The module file for adaptive_response_notable_events +""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import json + +from ansible.plugins.action import ActionBase +from ansible.errors import AnsibleActionFail +from ansible.module_utils.six.moves.urllib.parse import quote +from ansible.module_utils.connection import Connection + +from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import ( + utils, +) +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, + map_obj_to_params, + map_params_to_obj, + remove_get_keys_from_payload_dict, + set_defaults, +) +from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import ( + AnsibleArgSpecValidator, +) +from ansible_collections.splunk.es.plugins.modules.splunk_adaptive_response_notable_events import ( + DOCUMENTATION, +) + + +class ActionModule(ActionBase): + """action module""" + + def __init__(self, *args, **kwargs): + 
super(ActionModule, self).__init__(*args, **kwargs) + self._result = None + self.api_object = ( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches" + ) + self.module_name = "adaptive_response_notable_events" + self.key_transform = { + "action.notable.param.default_owner": "default_owner", + "action.notable.param.default_status": "default_status", + "action.notable.param.drilldown_name": "drilldown_name", + "action.notable.param.drilldown_search": "drilldown_search", + "action.notable.param.drilldown_earliest_offset": "drilldown_earliest_offset", + "action.notable.param.drilldown_latest_offset": "drilldown_latest_offset", + "action.notable.param.extract_artifacts": "extract_artifacts", + "action.notable.param.investigation_profiles": "investigation_profiles", + "action.notable.param.next_steps": "next_steps", + "action.notable.param.recommended_actions": "recommended_actions", + "action.notable.param.rule_description": "description", + "action.notable.param.rule_title": "name", + "action.notable.param.security_domain": "security_domain", + "action.notable.param.severity": "severity", + "name": "correlation_search_name", + } + + def _check_argspec(self): + aav = AnsibleArgSpecValidator( + data=self._task.args, + schema=DOCUMENTATION, + schema_format="doc", + name=self._task.action, + ) + valid, errors, self._task.args = aav.validate() + if not valid: + self._result["failed"] = True + self._result["msg"] = errors + + def fail_json(self, msg): + """Replace the AnsibleModule fail_json here + :param msg: The message for the failure + :type msg: str + """ + msg = msg.replace("(basic.py)", self._task.action) + raise AnsibleActionFail(msg) + + # need to store 'recommended_actions','extract_artifacts','next_steps' and 'investigation_profiles' + # since merging in the parsed form will eliminate any differences + def save_params(self, want_conf): + param_store = {} + if "recommended_actions" in want_conf: + param_store["recommended_actions"] = want_conf[ + 
"recommended_actions" + ] + if "extract_artifacts" in want_conf: + param_store["extract_artifacts"] = want_conf["extract_artifacts"] + if "next_steps" in want_conf: + param_store["next_steps"] = want_conf["next_steps"] + if "investigation_profiles" in want_conf: + param_store["investigation_profiles"] = want_conf[ + "investigation_profiles" + ] + + return param_store + + # responsible for correctly setting certain parameters depending on the state being triggered. + # These parameters are responsible for enabling and disabling notable response actions + def create_metadata(self, metadata, mode="add"): + if mode == "add": + if "actions" in metadata: + if metadata["actions"] == "notable": + pass + elif ( + len(metadata["actions"].split(",")) > 0 + and "notable" not in metadata["actions"] + ): + metadata["actions"] = metadata["actions"] + ", notable" + else: + metadata["actions"] = "notable" + metadata["action.notable"] = "1" + elif mode == "delete": + if "actions" in metadata: + if metadata["actions"] == "notable": + metadata["actions"] = "" + elif ( + len(metadata["actions"].split(",")) > 0 + and "notable" in metadata["actions"] + ): + tmp_list = metadata["actions"].split(",") + tmp_list.remove(" notable") + metadata["actions"] = ",".join(tmp_list) + metadata["action.notable"] = "0" + + return metadata + + def map_params_to_object(self, config): + res = {} + res["correlation_search_name"] = config["name"] + + res.update(map_params_to_obj(config["content"], self.key_transform)) + + if "extract_artifacts" in res: + res["extract_artifacts"] = json.loads(res["extract_artifacts"]) + + if "investigation_profiles" in res: + if res["investigation_profiles"] == "{}": + res.pop("investigation_profiles") + else: + res["investigation_profiles"] = json.loads( + res["investigation_profiles"] + ) + investigation_profiles = [] + for keys in res["investigation_profiles"].keys(): + investigation_profiles.append(keys.split("profile://")[1]) + res["investigation_profiles"] = 
investigation_profiles + + if "recommended_actions" in res: + res["recommended_actions"] = res["recommended_actions"].split(",") + + if "next_steps" in res: + next_steps = json.loads(res["next_steps"])["data"] + + next_steps = next_steps.split("]][[") + # trimming trailing characters + next_steps[0] = next_steps[0].strip("[") + next_steps[-1] = next_steps[-1].strip("]") + + res["next_steps"] = [] + for element in next_steps: + res["next_steps"].append(element.split("|")[1]) + + if "default_status" in res: + mapping = { + "0": "unassigned", + "1": "new", + "2": "in progress", + "3": "pending", + "4": "resolved", + "5": "closed", + } + res["default_status"] = mapping[res["default_status"]] + + # need to store correlation search details for populating future request payloads + metadata = {} + metadata["search"] = config["content"]["search"] + metadata["actions"] = config["content"]["actions"] + + return res, metadata + + def map_objects_to_params(self, metadata, want_conf): + res = {} + + res.update(map_obj_to_params(want_conf, self.key_transform)) + res.update(self.create_metadata(metadata)) + + if "action.notable.param.extract_artifacts" in res: + res["action.notable.param.extract_artifacts"] = json.dumps( + res["action.notable.param.extract_artifacts"] + ) + + if "action.notable.param.recommended_actions" in res: + res["action.notable.param.recommended_actions"] = ",".join( + res["action.notable.param.recommended_actions"] + ) + + if "action.notable.param.investigation_profiles" in res: + investigation_profiles = {} + for element in res["action.notable.param.investigation_profiles"]: + investigation_profiles["profile://" + element] = {} + res["action.notable.param.investigation_profiles"] = json.dumps( + investigation_profiles + ) + + if "action.notable.param.next_steps" in res: + next_steps = "" + for next_step in res["action.notable.param.next_steps"]: + next_steps += "[[action|{0}]]".format(next_step) + + # NOTE: version:1 appears to be hard coded when you 
create this via the splunk web UI + next_steps_dict = {"version": 1, "data": next_steps} + res["action.notable.param.next_steps"] = json.dumps( + next_steps_dict + ) + + if "action.notable.param.default_status" in res: + mapping = { + "unassigned": "0", + "new": "1", + "in progress": "2", + "pending": "3", + "resolved": "4", + "closed": "5", + } + res["action.notable.param.default_status"] = mapping[ + res["action.notable.param.default_status"] + ] + + # need to remove 'name', otherwise the API call will try to modify the correlation search + res.pop("name") + + return res + + def search_for_resource_name(self, conn_request, correlation_search_name): + query_dict = conn_request.get_by_path( + "{0}/{1}".format( + self.api_object, + quote(correlation_search_name), + ) + ) + + search_result = {} + + if query_dict: + search_result, metadata = self.map_params_to_object( + query_dict["entry"][0] + ) + else: + raise AnsibleActionFail( + "Correlation Search '{0}' doesn't exist".format( + correlation_search_name + ) + ) + + return search_result, metadata + + # Since there is no delete operation associated with an action, + # The delete operation will unset the relevant fields + def delete_module_api_config(self, conn_request, config): + before = [] + after = None + changed = False + for want_conf in config: + search_by_name, metadata = self.search_for_resource_name( + conn_request, want_conf["correlation_search_name"] + ) + search_by_name = utils.remove_empties(search_by_name) + + # Compare obtained values with a dict representing values in a 'deleted' state + diff_cmp = { + "correlation_search_name": want_conf[ + "correlation_search_name" + ], + "drilldown_earliest_offset": "$info_min_time$", + "drilldown_latest_offset": "$info_max_time$", + } + + # if the obtained values are different from 'deleted' state values + if search_by_name and search_by_name != diff_cmp: + before.append(search_by_name) + payload = { + "action.notable.param.default_owner": "", + 
"action.notable.param.default_status": "", + "action.notable.param.drilldown_name": "", + "action.notable.param.drilldown_search": "", + "action.notable.param.drilldown_earliest_offset": "$info_min_time$", + "action.notable.param.drilldown_latest_offset": "$info_max_time$", + "action.notable.param.extract_artifacts": "{}", + "action.notable.param.investigation_profiles": "{}", + "action.notable.param.next_steps": "", + "action.notable.param.recommended_actions": "", + "action.notable.param.rule_description": "", + "action.notable.param.rule_title": "", + "action.notable.param.security_domain": "", + "action.notable.param.severity": "", + } + payload.update(self.create_metadata(metadata, mode="delete")) + url = "{0}/{1}".format( + self.api_object, + quote(want_conf["correlation_search_name"]), + ) + conn_request.create_update( + url, + data=payload, + ) + changed = True + after = [] + + res_config = {} + res_config["after"] = after + res_config["before"] = before + + return res_config, changed + + def configure_module_api(self, conn_request, config): + before = [] + after = [] + changed = False + # Add to the THIS list for the value which needs to be excluded + # from HAVE params when compared to WANT param like 'ID' can be + # part of HAVE param but may not be part of your WANT param + defaults = { + "drilldown_earliest_offset": "$info_min_time$", + "drilldown_latest_offset": "$info_max_time$", + "extract_artifacts": { + "asset": [ + "src", + "dest", + "dvc", + "orig_host", + ], + "identity": [ + "src_user", + "user", + "src_user_id", + "src_user_role", + "user_id", + "user_role", + "vendor_account", + ], + }, + "investigation_profiles": "{}", + } + remove_from_diff_compare = [] + for want_conf in config: + have_conf, metadata = self.search_for_resource_name( + conn_request, want_conf["correlation_search_name"] + ) + correlation_search_name = want_conf["correlation_search_name"] + + if "notable" in metadata["actions"]: + want_conf = set_defaults(want_conf, 
defaults) + want_conf = utils.remove_empties(want_conf) + diff = utils.dict_diff(have_conf, want_conf) + + # Check if have_conf has extra parameters + if self._task.args["state"] == "replaced": + diff2 = utils.dict_diff(want_conf, have_conf) + if len(diff) or len(diff2): + diff.update(diff2) + + if diff: + before.append(have_conf) + if self._task.args["state"] == "merged": + + # need to store 'recommended_actions','extract_artifacts' + # 'next_steps' and 'investigation_profiles' + # since merging in the parsed form will eliminate any differences + param_store = self.save_params(want_conf) + + want_conf = utils.remove_empties( + utils.dict_merge(have_conf, want_conf) + ) + want_conf = remove_get_keys_from_payload_dict( + want_conf, remove_from_diff_compare + ) + + # restoring parameters + want_conf.update(param_store) + + changed = True + + payload = self.map_objects_to_params( + metadata, want_conf + ) + + url = "{0}/{1}".format( + self.api_object, + quote(correlation_search_name), + ) + api_response = conn_request.create_update( + url, + data=payload, + ) + response_json, metadata = self.map_params_to_object( + api_response["entry"][0] + ) + + after.append(response_json) + elif self._task.args["state"] == "replaced": + self.delete_module_api_config( + conn_request=conn_request, config=[want_conf] + ) + changed = True + + payload = self.map_objects_to_params( + metadata, want_conf + ) + + url = "{0}/{1}".format( + self.api_object, + quote(correlation_search_name), + ) + api_response = conn_request.create_update( + url, + data=payload, + ) + response_json, metadata = self.map_params_to_object( + api_response["entry"][0] + ) + + after.append(response_json) + else: + before.append(have_conf) + after.append(have_conf) + else: + changed = True + want_conf = utils.remove_empties(want_conf) + payload = self.map_objects_to_params(metadata, want_conf) + + url = "{0}/{1}".format( + self.api_object, + quote(correlation_search_name), + ) + api_response = 
conn_request.create_update( + url, + data=payload, + ) + + response_json, metadata = self.map_params_to_object( + api_response["entry"][0] + ) + + after.extend(before) + after.append(response_json) + if not changed: + after = None + + res_config = {} + res_config["after"] = after + res_config["before"] = before + + return res_config, changed + + def run(self, tmp=None, task_vars=None): + self._supports_check_mode = True + self._result = super(ActionModule, self).run(tmp, task_vars) + + self._check_argspec() + if self._result.get("failed"): + return self._result + + self._result[self.module_name] = {} + + # config is retrieved as a string; need to deserialise + config = self._task.args.get("config") + + conn = Connection(self._connection.socket_path) + + conn_request = SplunkRequest( + action_module=self, + connection=conn, + not_rest_data_keys=["state"], + ) + + if self._task.args["state"] == "gathered": + if config: + self._result["changed"] = False + self._result[self.module_name]["gathered"] = [] + for item in config: + self._result[self.module_name]["gathered"].append( + self.search_for_resource_name( + conn_request, item["correlation_search_name"] + )[0] + ) + + elif ( + self._task.args["state"] == "merged" + or self._task.args["state"] == "replaced" + ): + ( + self._result[self.module_name], + self._result["changed"], + ) = self.configure_module_api(conn_request, config) + if self._result[self.module_name]["after"] is None: + self._result[self.module_name].pop("after") + + elif self._task.args["state"] == "deleted": + ( + self._result[self.module_name], + self._result["changed"], + ) = self.delete_module_api_config(conn_request, config) + if self._result[self.module_name]["after"] is None: + self._result[self.module_name].pop("after") + + return self._result diff --git a/ansible_collections/splunk/es/plugins/action/splunk_correlation_searches.py b/ansible_collections/splunk/es/plugins/action/splunk_correlation_searches.py new file mode 100644 index 
000000000..5f0daea16 --- /dev/null +++ b/ansible_collections/splunk/es/plugins/action/splunk_correlation_searches.py @@ -0,0 +1,435 @@ +# +# Copyright 2022 Red Hat Inc. +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. +# + +""" +The module file for splunk_correlation_searches +""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import json +from ansible.plugins.action import ActionBase +from ansible.errors import AnsibleActionFail +from ansible.module_utils.six.moves.urllib.parse import quote +from ansible.module_utils.connection import Connection + +from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import ( + utils, +) +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, + map_obj_to_params, + map_params_to_obj, + remove_get_keys_from_payload_dict, + set_defaults, +) +from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import ( + AnsibleArgSpecValidator, +) +from ansible_collections.splunk.es.plugins.modules.splunk_correlation_searches import ( + DOCUMENTATION, +) + + +class ActionModule(ActionBase): + """action module""" + + def __init__(self, *args, **kwargs): + super(ActionModule, self).__init__(*args, **kwargs) + self._result = None + self.api_object = ( + 
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches" + ) + self.module_name = "correlation_searches" + self.key_transform = { + "disabled": "disabled", + "name": "name", + "description": "description", + "search": "search", + "action.correlationsearch.annotations": "annotations", + "request.ui_dispatch_app": "ui_dispatch_context", + "dispatch.earliest_time": "time_earliest", + "dispatch.latest_time": "time_latest", + "cron_schedule": "cron_schedule", + "realtime_schedule": "scheduling", + "schedule_window": "schedule_window", + "schedule_priority": "schedule_priority", + "alert.digest_mode": "trigger_alert", + "alert_type": "trigger_alert_when", + "alert_comparator": "trigger_alert_when_condition", + "alert_threshold": "trigger_alert_when_value", + "alert.suppress": "suppress_alerts", + "alert.suppress.period": "throttle_window_duration", + "alert.suppress.fields": "throttle_fields_to_group_by", + } + + def _check_argspec(self): + aav = AnsibleArgSpecValidator( + data=self._task.args, + schema=DOCUMENTATION, + schema_format="doc", + name=self._task.action, + ) + valid, errors, self._task.args = aav.validate() + if not valid: + self._result["failed"] = True + self._result["msg"] = errors + + def fail_json(self, msg): + """Replace the AnsibleModule fail_json here + :param msg: The message for the failure + :type msg: str + """ + msg = msg.replace("(basic.py)", self._task.action) + raise AnsibleActionFail(msg) + + # need to store 'annotations' and 'throttle_fields_to_group_by' + # since merging in the parsed form will eliminate any differences + # This is because these fields are getting converted from strings + # to lists/dictionaries, and so these fields need to be compared + # as such + def save_params(self, want_conf): + param_store = {} + if "annotations" in want_conf: + param_store["annotations"] = want_conf["annotations"] + if "throttle_fields_to_group_by" in want_conf: + param_store["throttle_fields_to_group_by"] = want_conf[ + 
"throttle_fields_to_group_by" + ] + + return param_store + + def map_params_to_object(self, config): + res = {} + + res["app"] = config["acl"]["app"] + res.update(map_params_to_obj(config["content"], self.key_transform)) + res.update(map_params_to_obj(config, self.key_transform)) + + if "scheduling" in res: + if res["scheduling"]: + res["scheduling"] = "realtime" + else: + res["scheduling"] = "continuous" + + if "trigger_alert" in res: + if res["trigger_alert"]: + res["trigger_alert"] = "once" + else: + res["trigger_alert"] = "for each result" + + if "throttle_fields_to_group_by" in res: + res["throttle_fields_to_group_by"] = res[ + "throttle_fields_to_group_by" + ].split(",") + + if "annotations" in res: + res["annotations"] = json.loads(res["annotations"]) + + custom = [] + + # need to check for custom annotation frameworks + for k, v in res["annotations"].items(): + if k in {"cis20", "nist", "mitre_attack", "kill_chain_phases"}: + + continue + entry = {} + entry["framework"] = k + entry["custom_annotations"] = v + custom.append(entry) + + if custom: + for entry in custom: + res["annotations"].pop(entry["framework"]) + res["annotations"]["custom"] = custom + + return res + + def map_objects_to_params(self, want_conf): + res = {} + + # setting parameters that enable correlation search + res["action.correlationsearch.enabled"] = "1" + res["is_scheduled"] = True + res["dispatch.rt_backfill"] = True + res["action.correlationsearch.label"] = want_conf["name"] + + res.update(map_obj_to_params(want_conf, self.key_transform)) + + if "realtime_schedule" in res: + if res["realtime_schedule"] == "realtime": + res["realtime_schedule"] = True + else: + res["realtime_schedule"] = False + + if "alert.digest_mode" in res: + if res["alert.digest_mode"] == "once": + res["alert.digest_mode"] = True + else: + res["alert.digest_mode"] = False + + if "alert.suppress.fields" in res: + res["alert.suppress.fields"] = ",".join( + res["alert.suppress.fields"] + ) + + if ( + 
"action.correlationsearch.annotations" in res + and "custom" in res["action.correlationsearch.annotations"] + ): + for ele in res["action.correlationsearch.annotations"]["custom"]: + res["action.correlationsearch.annotations"][ + ele["framework"] + ] = ele["custom_annotations"] + res["action.correlationsearch.annotations"].pop("custom") + res["action.correlationsearch.annotations"] = json.dumps( + res["action.correlationsearch.annotations"] + ) + + return res + + def search_for_resource_name(self, conn_request, correlation_search_name): + query_dict = conn_request.get_by_path( + "{0}/{1}".format( + self.api_object, + quote(correlation_search_name), + ) + ) + + search_result = {} + + if query_dict: + search_result = self.map_params_to_object(query_dict["entry"][0]) + + return search_result + + def delete_module_api_config(self, conn_request, config): + before = [] + after = None + changed = False + for want_conf in config: + search_by_name = self.search_for_resource_name( + conn_request, want_conf["name"] + ) + + if search_by_name: + before.append(search_by_name) + url = "{0}/{1}".format( + self.api_object, + quote(want_conf["name"]), + ) + conn_request.delete_by_path( + url, + ) + changed = True + after = [] + + res_config = {} + res_config["after"] = after + res_config["before"] = before + + return res_config, changed + + def configure_module_api(self, conn_request, config): + before = [] + after = [] + changed = False + # Add to the THIS list for the value which needs to be excluded + # from HAVE params when compared to WANT param like 'ID' can be + # part of HAVE param but may not be part of your WANT param + defaults = {} + remove_from_diff_compare = [] + for want_conf in config: + have_conf = self.search_for_resource_name( + conn_request, want_conf["name"] + ) + + if have_conf: + want_conf = set_defaults(want_conf, defaults) + want_conf = utils.remove_empties(want_conf) + diff = utils.dict_diff(have_conf, want_conf) + + # Check if have_conf has extra 
parameters + if self._task.args["state"] == "replaced": + diff2 = utils.dict_diff(want_conf, have_conf) + if len(diff) or len(diff2): + diff.update(diff2) + + if diff: + name = want_conf["name"] + before.append(have_conf) + if self._task.args["state"] == "merged": + # need to store 'annotations' and 'throttle_group_by_field' + # since merging in the parsed form will eliminate any differences + param_store = self.save_params(want_conf) + + want_conf = utils.remove_empties( + utils.dict_merge(have_conf, want_conf) + ) + want_conf = remove_get_keys_from_payload_dict( + want_conf, remove_from_diff_compare + ) + + # restoring parameters + want_conf.update(param_store) + + changed = True + + payload = self.map_objects_to_params(want_conf) + + url = "{0}/{1}".format( + self.api_object, + quote(name), + ) + api_response = conn_request.create_update( + url, + data=payload, + ) + response_json = self.map_params_to_object( + api_response["entry"][0] + ) + + after.append(response_json) + elif self._task.args["state"] == "replaced": + self.delete_module_api_config( + conn_request=conn_request, config=[want_conf] + ) + changed = True + + payload = self.map_objects_to_params(want_conf) + + url = "{0}/{1}".format( + self.api_object, + quote(name), + ) + + # while creating new correlation search, this is how to set the 'app' field + if "app" in want_conf: + url = url.replace( + "SplunkEnterpriseSecuritySuite", + want_conf["app"], + ) + + api_response = conn_request.create_update( + url, + data=payload, + ) + response_json = self.map_params_to_object( + api_response["entry"][0] + ) + + after.append(response_json) + else: + before.append(have_conf) + after.append(have_conf) + else: + changed = True + want_conf = utils.remove_empties(want_conf) + name = want_conf["name"] + payload = self.map_objects_to_params(want_conf) + + url = "{0}/{1}".format( + self.api_object, + quote(name), + ) + + # while creating new correlation search, this is how to set the 'app' field + if "app" in 
want_conf: + url = url.replace( + "SplunkEnterpriseSecuritySuite", want_conf["app"] + ) + + api_response = conn_request.create_update( + url, + data=payload, + ) + response_json = self.map_params_to_object( + api_response["entry"][0] + ) + + after.extend(before) + after.append(response_json) + if not changed: + after = None + + res_config = {} + res_config["after"] = after + res_config["before"] = before + + return res_config, changed + + def run(self, tmp=None, task_vars=None): + self._supports_check_mode = True + self._result = super(ActionModule, self).run(tmp, task_vars) + + self._check_argspec() + if self._result.get("failed"): + return self._result + + self._result[self.module_name] = {} + + # config is retrieved as a string; need to deserialise + config = self._task.args.get("config") + + conn = Connection(self._connection.socket_path) + + conn_request = SplunkRequest( + action_module=self, + connection=conn, + not_rest_data_keys=["state"], + ) + + if self._task.args["state"] == "gathered": + if config: + self._result["changed"] = False + self._result["gathered"] = [] + for item in config: + result = self.search_for_resource_name( + conn_request, item["name"] + ) + if result: + self._result["gathered"].append(result) + for item in config: + self._result["gathered"].append( + self.search_for_resource_name( + conn_request, item["name"] + ) + ) + elif ( + self._task.args["state"] == "merged" + or self._task.args["state"] == "replaced" + ): + ( + self._result[self.module_name], + self._result["changed"], + ) = self.configure_module_api(conn_request, config) + if self._result[self.module_name]["after"] is None: + self._result[self.module_name].pop("after") + + elif self._task.args["state"] == "deleted": + ( + self._result[self.module_name], + self._result["changed"], + ) = self.delete_module_api_config(conn_request, config) + if self._result[self.module_name]["after"] is None: + self._result[self.module_name].pop("after") + + return self._result diff --git 
a/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_monitor.py b/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_monitor.py new file mode 100644 index 000000000..7c9c03a55 --- /dev/null +++ b/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_monitor.py @@ -0,0 +1,313 @@ +# +# Copyright 2022 Red Hat Inc. +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. 
+# + +""" +The module file for data_inputs_monitor +""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible.plugins.action import ActionBase +from ansible.module_utils.six.moves.urllib.parse import quote_plus +from ansible.module_utils.connection import Connection + +from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import ( + utils, +) +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, + map_obj_to_params, + map_params_to_obj, + remove_get_keys_from_payload_dict, + set_defaults, +) +from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import ( + AnsibleArgSpecValidator, +) +from ansible_collections.splunk.es.plugins.modules.splunk_data_inputs_monitor import ( + DOCUMENTATION, +) + + +class ActionModule(ActionBase): + """action module""" + + def __init__(self, *args, **kwargs): + super(ActionModule, self).__init__(*args, **kwargs) + self._result = None + self.api_object = "servicesNS/nobody/search/data/inputs/monitor" + self.module_name = "data_inputs_monitor" + self.key_transform = { + "blacklist": "blacklist", + "check-index": "check_index", # not returned + "check-path": "check_path", # not returned + "crc-salt": "crc_salt", + "disabled": "disabled", + "followTail": "follow_tail", + "host": "host", + "host_regex": "host_regex", + "host_segment": "host_segment", + "ignore-older-than": "ignore_older_than", # not returned + "index": "index", + "name": "name", + "recursive": "recursive", + "rename-source": "rename_source", # not returned + "sourcetype": "sourcetype", + "time-before-close": "time_before_close", # not returned + "whitelist": "whitelist", + } + + def _check_argspec(self): + aav = AnsibleArgSpecValidator( + data=self._task.args, + schema=DOCUMENTATION, + schema_format="doc", + name=self._task.action, + ) + valid, errors, self._task.args = aav.validate() + if not valid: + self._result["failed"] = True 
+ self._result["msg"] = errors + + def map_params_to_object(self, config): + res = {} + res["name"] = config["name"] + + # splunk takes "crc-salt" as input parameter, and returns "crcSalt" in output + # therefore we can't directly use mapping + if config["content"].get("crcSalt"): + config["content"]["crc-salt"] = config["content"]["crcSalt"] + + res.update(map_params_to_obj(config["content"], self.key_transform)) + + return res + + def search_for_resource_name(self, conn_request, directory_name): + query_dict = conn_request.get_by_path( + "{0}/{1}".format(self.api_object, quote_plus(directory_name)) + ) + + search_result = {} + + if query_dict: + search_result = self.map_params_to_object(query_dict["entry"][0]) + + return search_result + + def delete_module_api_config(self, conn_request, config): + before = [] + after = None + changed = False + for want_conf in config: + search_by_name = self.search_for_resource_name( + conn_request, want_conf["name"] + ) + if search_by_name: + before.append(search_by_name) + conn_request.delete_by_path( + "{0}/{1}".format( + self.api_object, quote_plus(want_conf["name"]) + ) + ) + changed = True + after = [] + + res_config = {} + res_config["after"] = after + res_config["before"] = before + + return res_config, changed + + def configure_module_api(self, conn_request, config): + before = [] + after = [] + changed = False + # Add to the THIS list for the value which needs to be excluded + # from HAVE params when compared to WANT param like 'ID' can be + # part of HAVE param but may not be part of your WANT param + defaults = { + "disabled": False, + "host": "$decideOnStartup", + "index": "default", + } + remove_from_diff_compare = [ + "check_path", + "check_index", + "ignore_older_than", + "time_before_close", + "rename_source", + ] + for want_conf in config: + have_conf = self.search_for_resource_name( + conn_request, want_conf["name"] + ) + + if have_conf: + want_conf = set_defaults(want_conf, defaults) + want_conf = 
utils.remove_empties(want_conf) + diff = utils.dict_diff(have_conf, want_conf) + + # Check if have_conf has extra parameters + if self._task.args["state"] == "replaced": + diff2 = utils.dict_diff(want_conf, have_conf) + if len(diff) or len(diff2): + diff.update(diff2) + + if diff: + diff = remove_get_keys_from_payload_dict( + diff, remove_from_diff_compare + ) + if diff: + before.append(have_conf) + if self._task.args["state"] == "merged": + + want_conf = utils.remove_empties( + utils.dict_merge(have_conf, want_conf) + ) + want_conf = remove_get_keys_from_payload_dict( + want_conf, remove_from_diff_compare + ) + changed = True + + payload = map_obj_to_params( + want_conf, self.key_transform + ) + url = "{0}/{1}".format( + self.api_object, + quote_plus(payload.pop("name")), + ) + api_response = conn_request.create_update( + url, + data=payload, + ) + response_json = self.map_params_to_object( + api_response["entry"][0] + ) + + after.append(response_json) + elif self._task.args["state"] == "replaced": + conn_request.delete_by_path( + "{0}/{1}".format( + self.api_object, + quote_plus(want_conf["name"]), + ) + ) + changed = True + + payload = map_obj_to_params( + want_conf, self.key_transform + ) + url = "{0}".format(self.api_object) + api_response = conn_request.create_update( + url, + data=payload, + ) + response_json = self.map_params_to_object( + api_response["entry"][0] + ) + + after.append(response_json) + else: + before.append(have_conf) + after.append(have_conf) + else: + before.append(have_conf) + after.append(have_conf) + else: + changed = True + want_conf = utils.remove_empties(want_conf) + + payload = map_obj_to_params(want_conf, self.key_transform) + url = "{0}".format(self.api_object) + api_response = conn_request.create_update( + url, + data=payload, + ) + response_json = self.map_params_to_object( + api_response["entry"][0] + ) + + after.extend(before) + after.append(response_json) + if not changed: + after = None + + res_config = {} + 
res_config["after"] = after + res_config["before"] = before + + return res_config, changed + + def run(self, tmp=None, task_vars=None): + + self._supports_check_mode = True + self._result = super(ActionModule, self).run(tmp, task_vars) + + self._check_argspec() + if self._result.get("failed"): + return self._result + + # self._result[self.module_name] = {} + + config = self._task.args.get("config") + + conn = Connection(self._connection.socket_path) + + conn_request = SplunkRequest( + action_module=self, + connection=conn, + not_rest_data_keys=["state"], + ) + + if self._task.args["state"] == "gathered": + if config: + self._result["gathered"] = [] + self._result["changed"] = False + for item in config: + result = self.search_for_resource_name( + conn_request, item["name"] + ) + if result: + self._result["gathered"].append(result) + else: + self._result["gathered"] = conn_request.get_by_path( + self.api_object + )["entry"] + elif ( + self._task.args["state"] == "merged" + or self._task.args["state"] == "replaced" + ): + ( + self._result[self.module_name], + self._result["changed"], + ) = self.configure_module_api(conn_request, config) + if self._result[self.module_name]["after"] is None: + self._result[self.module_name].pop("after") + + elif self._task.args["state"] == "deleted": + ( + self._result[self.module_name], + self._result["changed"], + ) = self.delete_module_api_config(conn_request, config) + if self._result[self.module_name]["after"] is None: + self._result[self.module_name].pop("after") + + return self._result diff --git a/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_network.py b/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_network.py new file mode 100644 index 000000000..bd72d12b5 --- /dev/null +++ b/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_network.py @@ -0,0 +1,538 @@ +# +# Copyright 2022 Red Hat Inc. 
+# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. +# + +""" +The module file for data_inputs_network +""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible.plugins.action import ActionBase +from ansible.errors import AnsibleActionFail +from ansible.module_utils.six.moves.urllib.parse import quote_plus +from ansible.module_utils.connection import Connection + +from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import ( + utils, +) +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, + map_obj_to_params, + map_params_to_obj, + remove_get_keys_from_payload_dict, +) +from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import ( + AnsibleArgSpecValidator, +) +from ansible_collections.splunk.es.plugins.modules.splunk_data_inputs_network import ( + DOCUMENTATION, +) + + +class ActionModule(ActionBase): + """action module""" + + def __init__(self, *args, **kwargs): + super(ActionModule, self).__init__(*args, **kwargs) + self._result = None + self.api_object = "servicesNS/nobody/search/data/inputs" + self.module_return = "data_inputs_network" + self.key_transform = { + "name": "name", + "connection_host": "connection_host", + "disabled": "disabled", + "index": "index", + "host": "host", + "no_appending_timestamp": 
"no_appending_timestamp", + "no_priority_stripping": "no_priority_stripping", + "rawTcpDoneTimeout": "raw_tcp_done_timeout", + "restrictToHost": "restrict_to_host", + "queue": "queue", + "SSL": "ssl", + "source": "source", + "sourcetype": "sourcetype", + "token": "token", + "password": "password", + "requireClientCert": "require_client_cert", + "rootCA": "root_ca", + "serverCert": "server_cert", + "cipherSuite": "cipher_suite", + } + + def _check_argspec(self): + aav = AnsibleArgSpecValidator( + data=self._task.args, + schema=DOCUMENTATION, + schema_format="doc", + name=self._task.action, + ) + valid, errors, self._task.args = aav.validate() + if not valid: + self._result["failed"] = True + self._result["msg"] = errors + + def fail_json(self, msg): + """Replace the AnsibleModule fail_json here + :param msg: The message for the failure + :type msg: str + """ + msg = msg.replace("(basic.py)", self._task.action) + raise AnsibleActionFail(msg) + + def map_params_to_object(self, config, datatype=None): + res = {} + + res["name"] = config["name"] + res.update(map_params_to_obj(config["content"], self.key_transform)) + + # API returns back "index", even though it can't be set within /tcp/cooked + if datatype: + if datatype == "cooked" and "index" in res: + res.pop("index") + elif datatype == "splunktcptoken": + if "index" in res: + res.pop("index") + if "host" in res: + res.pop("host") + if "disabled" in res: + res.pop("disabled") + + return res + + # This function is meant to construct the URL and handle GET, POST and DELETE calls + # depending on th context. 
The URLs constructed and handled are: + # /tcp/raw[/{name}] + # /tcp/cooked[/{name}] + # /tcp/splunktcptoken[/{name}] + # /tcp/ssl[/{name}] + # /udp[/{name}] + def request_by_path( + self, + conn_request, + protocol, + datatype=None, + name=None, + req_type="get", + payload=None, + ): + query_dict = None + url = "" + + if protocol == "tcp": + if not datatype: + raise AnsibleActionFail("No datatype specified for TCP input") + + # In all cases except "ssl" datatype, creation of objects is handled + # by a POST request to the parent directory. Therefore name shouldn't + # be included in the URL. + if not name or (req_type == "post_create" and datatype != "ssl"): + name = "" + + url = "{0}/{1}/{2}/{3}".format( + self.api_object, + protocol, + datatype, + quote_plus(str(name)), + ) + # if no "name" was provided + if url[-1] == "/": + url = url[:-1] + + elif protocol == "udp": + if datatype: + raise AnsibleActionFail("Datatype specified for UDP input") + if not name or req_type == "post_create": + name = "" + + url = "{0}/{1}/{2}".format( + self.api_object, + protocol, + quote_plus(str(name)), + ) + # if no "name" was provided + if url[-1] == "/": + url = url[:-1] + else: + raise AnsibleActionFail( + "Incompatible protocol specified. 
Please specify 'tcp' or 'udp'" + ) + + if req_type == "get": + query_dict = conn_request.get_by_path(url) + elif req_type == "post_create": + query_dict = conn_request.create_update(url, data=payload) + elif req_type == "post_update": + payload.pop("name") + query_dict = conn_request.create_update(url, data=payload) + elif req_type == "delete": + query_dict = conn_request.delete_by_path(url) + + return query_dict + + def search_for_resource_name(self, conn_request, protocol, datatype, name): + query_dict = self.request_by_path( + conn_request, + protocol, + datatype, + name, + ) + + search_result = {} + + if query_dict: + search_result = self.map_params_to_object( + query_dict["entry"][0], datatype + ) + + # Adding back protocol and datatype fields for better clarity + search_result["protocol"] = protocol + if datatype: + search_result["datatype"] = datatype + if datatype == "ssl": + search_result["name"] = name + + return search_result + + # If certain parameters are present, Splunk appends the value of those parameters + # to the name. Therefore this causes idempotency to fail. This function looks for + # said parameters and conducts checks to see if the configuration already exists. 
+ def parse_config(self, conn_request, want_conf): + old_name = None + protocol = want_conf["protocol"] + datatype = want_conf.get("datatype") + + if not want_conf.get("name"): + raise AnsibleActionFail("No name specified for merge action") + else: + # Int values confuse diff + want_conf["name"] = str(want_conf["name"]) + + old_name = want_conf["name"] + + if ( + want_conf.get("restrict_to_host") + and old_name.split(":")[0] == want_conf["restrict_to_host"] + ): + old_name = old_name.split(":")[1] + + # If "restrictToHost" parameter is set, the value of this parameter is appended + # to the numerical name meant to represent port number + if ( + want_conf.get("restrict_to_host") + and want_conf["restrict_to_host"] not in want_conf["name"] + ): + want_conf["name"] = "{0}:{1}".format( + want_conf["restrict_to_host"], want_conf["name"] + ) + + # If datatype is "splunktcptoken", the value "splunktcptoken://" is appended + # to the name + elif ( + datatype + and datatype == "splunktcptoken" + and "splunktcptoken://" not in want_conf["name"] + ): + want_conf["name"] = "{0}{1}".format( + "splunktcptoken://", want_conf["name"] + ) + + name = want_conf["name"] + + # If the above parameters are present, but the object doesn't exist + # the value of the parameters shouldn't be prepended to the name. + # Otherwise Splunk returns 400. This check is takes advantage of this + # and sets the correct name. + have_conf = None + try: + have_conf = self.search_for_resource_name( + conn_request, + protocol, + datatype, + name, + ) + # while creating new conf, we need to only use numerical values + # splunk will later append param value to it. 
+ if not have_conf: + want_conf["name"] = old_name + except AnsibleActionFail: + want_conf["name"] = old_name + have_conf = self.search_for_resource_name( + conn_request, + protocol, + datatype, + old_name, + ) + + # SSL response returns a blank "name" parameter, which causes problems + if datatype == "ssl": + have_conf["name"] = want_conf["name"] + + return have_conf, protocol, datatype, name, old_name + + def delete_module_api_config(self, conn_request, config): + before = [] + after = None + changed = False + for want_conf in config: + if not want_conf.get("name"): + raise AnsibleActionFail("No name specified") + + have_conf, protocol, datatype, name, _old_name = self.parse_config( + conn_request, want_conf + ) + + if protocol == "tcp" and datatype == "ssl": + raise AnsibleActionFail("Deleted state not supported for SSL") + + if have_conf: + before.append(have_conf) + self.request_by_path( + conn_request, + protocol, + datatype, + name, + req_type="delete", + ) + changed = True + after = [] + + ret_config = {} + ret_config["before"] = before + ret_config["after"] = after + + return ret_config, changed + + def configure_module_api(self, conn_request, config): + before = [] + after = [] + changed = False + + for want_conf in config: + # Add to the THIS list for the value which needs to be excluded + # from HAVE params when compared to WANT param like 'ID' can be + # part of HAVE param but may not be part of your WANT param + remove_from_diff_compare = [ + "datatype", + "protocol", + "cipher_suite", + ] + + have_conf, protocol, datatype, name, old_name = self.parse_config( + conn_request, want_conf + ) + + if ( + protocol == "tcp" + and datatype == "ssl" + and self._task.args["state"] == "replaced" + ): + raise AnsibleActionFail("Replaced state not supported for SSL") + + if have_conf: + want_conf = utils.remove_empties(want_conf) + diff = utils.dict_diff(have_conf, want_conf) + + # Check if have_conf has extra parameters + if self._task.args["state"] == 
"replaced": + diff2 = utils.dict_diff(want_conf, have_conf) + if len(diff) or len(diff2): + diff.update(diff2) + + if diff: + diff = remove_get_keys_from_payload_dict( + diff, remove_from_diff_compare + ) + if diff: + before.append(have_conf) + if self._task.args["state"] == "merged": + + want_conf = utils.remove_empties( + utils.dict_merge(have_conf, want_conf) + ) + want_conf = remove_get_keys_from_payload_dict( + want_conf, remove_from_diff_compare + ) + changed = True + + payload = map_obj_to_params( + want_conf, self.key_transform + ) + api_response = self.request_by_path( + conn_request, + protocol, + datatype, + name, + req_type="post_update", + payload=payload, + ) + response_json = self.map_params_to_object( + api_response["entry"][0], datatype + ) + + # Adding back protocol and datatype fields for better clarity + response_json["protocol"] = protocol + if datatype: + response_json["datatype"] = datatype + + after.append(response_json) + elif self._task.args["state"] == "replaced": + api_response = self.request_by_path( + conn_request, + protocol, + datatype, + name, + req_type="delete", + ) + + changed = True + payload = map_obj_to_params( + want_conf, self.key_transform + ) + # while creating new conf, we need to only use numerical values + # splunk will later append param value to it. 
+ payload["name"] = old_name + + api_response = self.request_by_path( + conn_request, + protocol, + datatype, + name, + req_type="post_create", + payload=payload, + ) + response_json = self.map_params_to_object( + api_response["entry"][0], datatype + ) + + # Adding back protocol and datatype fields for better clarity + response_json["protocol"] = protocol + if datatype: + response_json["datatype"] = datatype + + after.append(response_json) + else: + before.append(have_conf) + after.append(have_conf) + else: + before.append(have_conf) + after.append(have_conf) + else: + changed = True + want_conf = utils.remove_empties(want_conf) + + payload = map_obj_to_params(want_conf, self.key_transform) + + api_response = self.request_by_path( + conn_request, + protocol, + datatype, + name, + req_type="post_create", + payload=payload, + ) + response_json = self.map_params_to_object( + api_response["entry"][0], datatype + ) + + # Adding back protocol and datatype fields for better clarity + response_json["protocol"] = protocol + if datatype: + response_json["datatype"] = datatype + + after.extend(before) + after.append(response_json) + if not changed: + after = None + + ret_config = {} + ret_config["before"] = before + ret_config["after"] = after + + return ret_config, changed + + def run(self, tmp=None, task_vars=None): + self._supports_check_mode = True + self._result = super(ActionModule, self).run(tmp, task_vars) + self._check_argspec() + if self._result.get("failed"): + return self._result + + config = self._task.args.get("config") + + conn = Connection(self._connection.socket_path) + + conn_request = SplunkRequest( + connection=conn, + action_module=self, + ) + + if self._task.args["state"] == "gathered": + if config: + self._result["gathered"] = [] + self._result["changed"] = False + for item in config: + if item.get("name"): + + result = self.search_for_resource_name( + conn_request, + item["protocol"], + item.get("datatype"), + item.get("name"), + ) + if result: + 
self._result["gathered"].append(result) + else: + response_list = self.request_by_path( + conn_request, + item["protocol"], + item.get("datatype"), + None, + )["entry"] + self._result["gathered"] = [] + for response_dict in response_list: + self._result["gathered"].append( + self.map_params_to_object(response_dict), + ) + else: + raise AnsibleActionFail("No protocol specified") + + elif ( + self._task.args["state"] == "merged" + or self._task.args["state"] == "replaced" + ): + if config: + ( + self._result[self.module_return], + self._result["changed"], + ) = self.configure_module_api(conn_request, config) + if not self._result[self.module_return]["after"]: + self._result[self.module_return].pop("after") + + elif self._task.args["state"] == "deleted": + if config: + ( + self._result[self.module_return], + self._result["changed"], + ) = self.delete_module_api_config(conn_request, config) + if self._result[self.module_return]["after"] is None: + self._result[self.module_return].pop("after") + + return self._result diff --git a/ansible_collections/splunk/es/plugins/httpapi/splunk.py b/ansible_collections/splunk/es/plugins/httpapi/splunk.py new file mode 100644 index 000000000..91f079e06 --- /dev/null +++ b/ansible_collections/splunk/es/plugins/httpapi/splunk.py @@ -0,0 +1,77 @@ +# (c) 2019 Red Hat Inc. +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +--- +author: Ansible Security Team (@ansible-security) +name: splunk +short_description: HttpApi Plugin for Splunk +description: + - This HttpApi plugin provides methods to connect to Splunk over a + HTTP(S)-based api. 
+version_added: "1.0.0" +""" + +import json + +from ansible.module_utils.basic import to_text +from ansible.errors import AnsibleConnectionFailure +from ansible.module_utils.six.moves.urllib.error import HTTPError +from ansible_collections.ansible.netcommon.plugins.plugin_utils.httpapi_base import ( + HttpApiBase, +) +from ansible.module_utils.connection import ConnectionError + +BASE_HEADERS = {"Content-Type": "application/json"} + + +class HttpApi(HttpApiBase): + def send_request(self, request_method, path, payload=None): + # payload = json.dumps(payload) if payload else '{}' + + try: + self._display_request(request_method, path) + response, response_data = self.connection.send( + path, + payload, + method=request_method, + headers=BASE_HEADERS, + force_basic_auth=True, + ) + value = self._get_response_value(response_data) + + return response.getcode(), self._response_to_json(value) + except AnsibleConnectionFailure as e: + self.connection.queue_message( + "vvv", "AnsibleConnectionFailure: %s" % e + ) + if to_text("Could not connect to") in to_text(e): + raise + if to_text("401") in to_text(e): + return 401, "Authentication failure" + else: + return 404, "Object not found" + except HTTPError as e: + error = json.loads(e.read()) + return e.code, error + + def _display_request(self, request_method, path): + self.connection.queue_message( + "vvvv", + "Web Services: %s %s/%s" + % (request_method, self.connection._url, path), + ) + + def _get_response_value(self, response_data): + return to_text(response_data.getvalue()) + + def _response_to_json(self, response_text): + try: + return json.loads(response_text) if response_text else {} + # JSONDecodeError only available on Python 3.5+ + except ValueError: + raise ConnectionError("Invalid JSON response: %s" % response_text) diff --git a/ansible_collections/splunk/es/plugins/module_utils/splunk.py b/ansible_collections/splunk/es/plugins/module_utils/splunk.py new file mode 100644 index 000000000..240481d3a --- /dev/null 
+++ b/ansible_collections/splunk/es/plugins/module_utils/splunk.py
@@ -0,0 +1,256 @@
+# -*- coding: utf-8 -*-
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+from ansible.module_utils.urls import CertificateError
+from ansible.module_utils.six.moves.urllib.parse import urlencode
+from ansible.module_utils.connection import (
+    ConnectionError,
+    Connection,
+)
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six import iteritems
+
+
+def parse_splunk_args(module):
+    """
+    Get the valid fields that should be passed to the REST API as urlencoded
+    data so long as the argument specification to the module follows the
+    convention:
+        1) name field is Required to be passed as data to REST API
+        2) all module argspec items that should be passed to data are not
+           Required by the module and are set to default=None
+    """
+    try:
+        splunk_data = {}
+        for argspec in module.argument_spec:
+            if (
+                "default" in module.argument_spec[argspec]
+                and module.argument_spec[argspec]["default"] is None
+                and module.params[argspec] is not None
+            ):
+                splunk_data[argspec] = module.params[argspec]
+        return splunk_data
+    except TypeError as e:
+        module.fail_json(
+            msg="Invalid data type provided for splunk module_util.parse_splunk_args: {0}".format(
+                e
+            )
+        )
+
+
+def remove_get_keys_from_payload_dict(payload_dict, remove_key_list):
+    # Drop every key in remove_key_list from payload_dict (mutates in place).
+    for each_key in remove_key_list:
+        if each_key in payload_dict:
+            payload_dict.pop(each_key)
+    return payload_dict
+
+
+def map_params_to_obj(module_params, key_transform):
+    """The fn to convert the module params to API request params
+    :param module_params: Module params
+    :param key_transform: Dict mapping module param keys to API keys
+    :rtype: A dict
+    :returns: dict with module params transformed to API expected params
+    """
+
+    obj = {}
+    for k, v in iteritems(key_transform):
+        if k in module_params and (
+            module_params.get(k)
+            or module_params.get(k) == 0
+            or module_params.get(k) is False
+        ):
+            obj[v] = module_params.pop(k)
+    return obj
+
+
+def map_obj_to_params(module_return_params, key_transform):
+    """The fn to convert the API returned params to module params
+    :param module_return_params: API returned response params
+    :param key_transform: Dict mapping module param keys to API keys
+    :rtype: A dict
+    :returns: dict with API returned values mapped to module param keys
+    """
+    temp = {}
+    for k, v in iteritems(key_transform):
+        if v in module_return_params and (
+            module_return_params.get(v)
+            or module_return_params.get(v) == 0
+            or module_return_params.get(v) is False
+        ):
+            temp[k] = module_return_params.pop(v)
+    return temp
+
+
+def set_defaults(config, defaults):
+    # Fill in any missing keys of config from defaults (mutates in place).
+    for k, v in defaults.items():
+        config.setdefault(k, v)
+    return config
+
+
+class SplunkRequest(object):
+    # TODO: There is a ton of code only present to make sure the legacy modules
+    # work as intended. Once the modules are deprecated and no longer receive
+    # support, this object needs to be rewritten.
+    def __init__(
+        self,
+        module=None,
+        headers=None,
+        action_module=None,  # needs to be dealt with after end of support
+        connection=None,
+        keymap=None,
+        not_rest_data_keys=None,
+        # The legacy modules had a partial implementation of keymap, where the data
+        # passed to 'create_update' would completely be overwritten, and replaced
+        # by the 'get_data' function. This flag ensures that the modules that hadn't
+        # yet been updated to use the keymap, can continue to work as originally intended
+        override=True,
+    ):
+        # check if call being made by legacy module (passes 'module' param)
+        self.module = module
+        if module:
+            # This will be removed, once all of the available modules
+            # are moved to use action plugin design, as otherwise test
+            # would start to complain without the implementation.
+            self.connection = Connection(self.module._socket_path)
+            self.legacy = True
+        elif connection:
+            self.connection = connection
+            try:
+                self.connection.load_platform_plugins("splunk.es.splunk")
+                self.module = action_module
+                self.legacy = False
+
+            except ConnectionError:
+                raise
+
+        # The Splunk REST API endpoints often use keys that aren't pythonic so
+        # we need to handle that with a mapping to allow keys to be proper
+        # variables in the module argspec
+        if keymap is None:
+            self.keymap = {}
+        else:
+            self.keymap = keymap
+
+        # Select whether payload passed to create update is overridden or not
+        self.override = override
+
+        # This allows us to exclude specific argspec keys from being included by
+        # the rest data that don't follow the splunk_* naming convention
+        if not_rest_data_keys is None:
+            self.not_rest_data_keys = []
+        else:
+            self.not_rest_data_keys = not_rest_data_keys
+        self.not_rest_data_keys.append("validate_certs")
+
+    def _httpapi_error_handle(self, method, uri, payload=None):
+        # Send a request over the httpapi connection and translate transport
+        # errors / non-2xx responses into module failures. A 404 carrying a
+        # "not found" message is returned as an empty dict, not a failure.
+        try:
+            code, response = self.connection.send_request(
+                method, uri, payload=payload
+            )
+
+            if code == 404:
+                if to_text("Object not found") in to_text(response) or to_text(
+                    "Could not find object"
+                ) in to_text(response):
+                    return {}
+
+            if not (code >= 200 and code < 300):
+                self.module.fail_json(
+                    msg="Splunk httpapi returned error {0} with message {1}".format(
+                        code, response
+                    ),
+                )
+
+            return response
+
+        except ConnectionError as e:
+            self.module.fail_json(
+                msg="connection error occurred: {0}".format(e),
+            )
+        except CertificateError as e:
+            self.module.fail_json(
+                msg="certificate error occurred: {0}".format(e),
+            )
+        except ValueError as e:
+            try:
+                self.module.fail_json(
+                    msg="certificate not found: {0}".format(e)
+                )
+            except AttributeError:
+                pass
+
+    def get(self, url, **kwargs):
+        return self._httpapi_error_handle("GET", url, **kwargs)
+
+    def put(self, url, **kwargs):
+        return self._httpapi_error_handle("PUT", url, **kwargs)
+
+    def post(self, url, **kwargs):
+        return self._httpapi_error_handle("POST", url, **kwargs)
+
+    def delete(self, url, **kwargs):
+        return self._httpapi_error_handle("DELETE", url, **kwargs)
+
+    def get_data(self, config=None):
+        """
+        Get the valid fields that should be passed to the REST API as urlencoded
+        data so long as the argument specification to the module follows the
+        convention:
+            - the key to the argspec item does not start with splunk_
+            - the key does not exist in the not_rest_data_keys list
+        """
+        try:
+            splunk_data = {}
+            if self.legacy and not config:
+                config = self.module.params
+            for param in config:
+                if (config[param]) is not None and (
+                    param not in self.not_rest_data_keys
+                ):
+                    if param in self.keymap:
+                        splunk_data[self.keymap[param]] = config[param]
+                    else:
+                        splunk_data[param] = config[param]
+
+            return splunk_data
+
+        except TypeError as e:
+            self.module.fail_json(
+                msg="invalid data type provided: {0}".format(e)
+            )
+
+    def get_urlencoded_data(self, config):
+        return urlencode(self.get_data(config))
+
+    def get_by_path(self, rest_path):
+        """
+        GET attributes of an object at the given REST path
+        """
+
+        return self.get("/{0}?output_mode=json".format(rest_path))
+
+    def delete_by_path(self, rest_path):
+        """
+        DELETE the object at the given REST path
+        """
+
+        return self.delete("/{0}?output_mode=json".format(rest_path))
+
+    def create_update(self, rest_path, data):
+        """
+        Create or update the object at the given REST path in Splunk
+        """
+        # when 'self.override' is True, the 'get_data' function replaces 'data'
+        # in order to make use of keymap
+        if data is not None and self.override:
+            data = self.get_urlencoded_data(data)
+        return self.post(
+            "/{0}?output_mode=json".format(rest_path), payload=data
+        )
diff --git a/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py b/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py
new file mode 100644
index 000000000..29099424e
--- /dev/null
+++ 
b/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py @@ -0,0 +1,462 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# https://github.com/ansible/ansible/issues/65816 +# https://github.com/PyCQA/pylint/issues/214 + +# (c) 2018, Adam Miller (admiller@redhat.com) +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: adaptive_response_notable_event +short_description: Manage Splunk Enterprise Security Notable Event Adaptive Responses +description: + - This module allows for creation, deletion, and modification of Splunk + Enterprise Security Notable Event Adaptive Responses that are associated + with a correlation search +version_added: "1.0.0" +deprecated: + alternative: splunk_adaptive_response_notable_events + why: Newer and updated modules released with more functionality. + removed_at_date: '2024-09-01' +options: + name: + description: + - Name of notable event + required: true + type: str + correlation_search_name: + description: + - Name of correlation search to associate this notable event adaptive response with + required: true + type: str + description: + description: + - Description of the notable event, this will populate the description field for the web console + required: true + type: str + state: + description: + - Add or remove a data source. 
+ required: true + choices: [ "present", "absent" ] + type: str + security_domain: + description: + - Splunk Security Domain + type: str + required: False + choices: + - "access" + - "endpoint" + - "network" + - "threat" + - "identity" + - "audit" + default: "threat" + severity: + description: + - Severity rating + type: str + required: False + choices: + - "informational" + - "low" + - "medium" + - "high" + - "critical" + - "unknown" + default: "high" + default_owner: + description: + - Default owner of the notable event, if unset it will default to Splunk System Defaults + type: str + required: False + default_status: + description: + - Default status of the notable event, if unset it will default to Splunk System Defaults + type: str + required: False + choices: + - "unassigned" + - "new" + - "in progress" + - "pending" + - "resolved" + - "closed" + drill_down_name: + description: + - Name for drill down search, Supports variable substitution with fields from the matching event. + type: str + required: False + drill_down_search: + description: + - Drill down search, Supports variable substitution with fields from the matching event. + type: str + required: False + drill_down_earliest_offset: + description: + - Set the amount of time before the triggering event to search for related + events. For example, 2h. Use \"$info_min_time$\" to set the drill-down time + to match the earliest time of the search + type: str + required: False + default: \"$info_min_time$\" + drill_down_latest_offset: + description: + - Set the amount of time after the triggering event to search for related + events. For example, 1m. Use \"$info_max_time$\" to set the drill-down + time to match the latest time of the search + type: str + required: False + default: \"$info_max_time$\" + investigation_profiles: + description: + - Investigation profile to assiciate the notable event with. 
+ type: str + required: False + next_steps: + description: + - List of adaptive responses that should be run next + - Describe next steps and response actions that an analyst could take to address this threat. + type: list + elements: str + required: False + recommended_actions: + description: + - List of adaptive responses that are recommended to be run next + - Identifying Recommended Adaptive Responses will highlight those actions + for the analyst when looking at the list of response actions available, + making it easier to find them among the longer list of available actions. + type: list + elements: str + required: False + asset_extraction: + description: + - list of assets to extract, select any one or many of the available choices + - defaults to all available choices + type: list + elements: str + choices: + - src + - dest + - dvc + - orig_host + default: + - src + - dest + - dvc + - orig_host + required: False + identity_extraction: + description: + - list of identity fields to extract, select any one or many of the available choices + - defaults to all available choices + type: list + elements: str + choices: + - user + - src_user + default: + - user + - src_user + required: False + +author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> +""" +# FIXME - adaptive response action association is probaby going to need to be a separate module we stitch together in a role + +EXAMPLES = """ +- name: Example of using splunk.es.adaptive_response_notable_event module + splunk.es.adaptive_response_notable_event: + name: "Example notable event from Ansible" + correlation_search_name: "Example Correlation Search From Ansible" + description: "Example notable event from Ansible, description." 
+ state: "present" + next_steps: + - ping + - nslookup + recommended_actions: + - script + - ansiblesecurityautomation +""" + +import json + +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_text +from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus +from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import ( + utils, +) +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, +) + + +def main(): + + argspec = dict( + name=dict(required=True, type="str"), + correlation_search_name=dict(required=True, type="str"), + description=dict(required=True, type="str"), + state=dict(choices=["present", "absent"], required=True), + security_domain=dict( + choices=[ + "access", + "endpoint", + "network", + "threat", + "identity", + "audit", + ], + required=False, + default="threat", + ), + severity=dict( + choices=[ + "informational", + "low", + "medium", + "high", + "critical", + "unknown", + ], + required=False, + default="high", + ), + default_owner=dict(required=False, type="str"), + default_status=dict( + choices=[ + "unassigned", + "new", + "in progress", + "pending", + "resolved", + "closed", + ], + required=False, + ), + drill_down_name=dict(required=False, type="str"), + drill_down_search=dict(required=False, type="str"), + drill_down_earliest_offset=dict( + required=False, type="str", default="$info_min_time$" + ), + drill_down_latest_offset=dict( + required=False, type="str", default="$info_max_time$" + ), + investigation_profiles=dict(required=False, type="str"), + next_steps=dict( + required=False, type="list", elements="str", default=[] + ), + recommended_actions=dict( + required=False, type="list", elements="str", default=[] + ), + asset_extraction=dict( + required=False, + type="list", + elements="str", + default=["src", "dest", "dvc", "orig_host"], + choices=["src", "dest", "dvc", "orig_host"], + ), + identity_extraction=dict( + 
required=False, + type="list", + elements="str", + default=["user", "src_user"], + choices=["user", "src_user"], + ), + ) + + module = AnsibleModule(argument_spec=argspec, supports_check_mode=True) + + splunk_request = SplunkRequest( + module, + override=False, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + not_rest_data_keys=["state"], + ) + + query_dict = splunk_request.get_by_path( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format( + quote_plus(module.params["correlation_search_name"]) + ) + ) + + # Have to custom craft the data here because they overload the saved searches + # endpoint in the rest api and we want to hide the nuance from the user + request_post_data = {} + + # FIXME need to figure out how to properly support these, the possible values appear to + # be dynamically created based on what the search is indexing + # request_post_data['action.notable.param.extract_assets'] = '[\"src\",\"dest\",\"dvc\",\"orig_host\"]' + # request_post_data['action.notable.param.extract_identities'] = [\"src_user\",\"user\"] + if module.params["next_steps"]: + if len(module.params["next_steps"]) == 1: + next_steps = "[[action|{0}]]".format( + module.params["next_steps"][0] + ) + else: + next_steps = "" + for next_step in module.params["next_steps"]: + if next_steps: + next_steps += "\n[[action|{0}]]".format(next_step) + else: + next_steps = "[[action|{0}]]".format(next_step) + + # NOTE: version:1 appears to be hard coded when you create this via the splunk web UI + # but I don't know what it is/means because there's no docs on it + next_steps_dict = {"version": 1, "data": next_steps} + request_post_data["action.notable.param.next_steps"] = json.dumps( + next_steps_dict + ) + + if module.params["recommended_actions"]: + if len(module.params["recommended_actions"]) == 1: + request_post_data[ + "action.notable.param.recommended_actions" + ] = module.params["recommended_actions"][0] + else: + request_post_data[ + 
"action.notable.param.recommended_actions" + ] = ",".join(module.params["recommended_actions"]) + + request_post_data["action.notable.param.rule_description"] = module.params[ + "description" + ] + request_post_data["action.notable.param.rule_title"] = module.params[ + "name" + ] + request_post_data["action.notable.param.security_domain"] = module.params[ + "security_domain" + ] + request_post_data["action.notable.param.severity"] = module.params[ + "severity" + ] + request_post_data["action.notable.param.asset_extraction"] = module.params[ + "asset_extraction" + ] + request_post_data[ + "action.notable.param.identity_extraction" + ] = module.params["identity_extraction"] + + # NOTE: this field appears to be hard coded when you create this via the splunk web UI + # but I don't know what it is/means because there's no docs on it + request_post_data["action.notable.param.verbose"] = "0" + + if module.params["default_owner"]: + request_post_data[ + "action.notable.param.default_owner" + ] = module.params["default_owner"] + + if module.params["default_status"]: + request_post_data[ + "action.notable.param.default_status" + ] = module.params["default_status"] + + request_post_data = utils.remove_empties(request_post_data) + + if query_dict: + request_post_data["search"] = query_dict["entry"][0]["content"][ + "search" + ] + if "actions" in query_dict["entry"][0]["content"]: + if query_dict["entry"][0]["content"]["actions"] == "notable": + pass + elif ( + len(query_dict["entry"][0]["content"]["actions"].split(",")) + > 0 + and "notable" + not in query_dict["entry"][0]["content"]["actions"] + ): + request_post_data["actions"] = ( + query_dict["entry"][0]["content"]["actions"] + ", notable" + ) + else: + request_post_data["actions"] = "notable" + else: + module.fail_json( + msg="Unable to find correlation search: {0}", + splunk_data=query_dict, + ) + + if module.params["state"] == "present": + needs_change = False + for arg in request_post_data: + if arg in 
query_dict["entry"][0]["content"]: + if to_text(query_dict["entry"][0]["content"][arg]) != to_text( + request_post_data[arg] + ): + needs_change = True + if not needs_change: + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + if module.check_mode and needs_change: + module.exit_json( + changed=True, + msg="A change would have been made if not in check mode.", + splunk_data=query_dict, + ) + if needs_change: + splunk_data = splunk_request.create_update( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format( + quote_plus(module.params["correlation_search_name"]) + ), + data=urlencode(request_post_data), + ) + module.exit_json( + changed=True, + msg="{0} updated.".format( + module.params["correlation_search_name"] + ), + splunk_data=splunk_data, + ) + + if module.params["state"] == "absent": + # FIXME - need to figure out how to clear the action.notable.param fields from the api endpoint + module.exit_json( + changed=True, + msg="Deleted {0}.".format(module.params["name"]), + splunk_data=splunk_data, + ) + for arg in request_post_data: + if arg in query_dict["entry"][0]["content"]: + needs_change = True + del query_dict["entry"][0]["content"][arg] + if not needs_change: + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + if module.check_mode and needs_change: + module.exit_json( + changed=True, + msg="A change would have been made if not in check mode.", + splunk_data=query_dict, + ) + if needs_change: + splunk_data = splunk_request.create_update( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format( + quote_plus(module.params["correlation_search_name"]) + ), + data=urlencode(request_post_data), + ) + module.exit_json( + changed=True, + msg="{0} updated.".format( + module.params["correlation_search_name"] + ), + splunk_data=splunk_data, + ) + + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + + +if __name__ == 
"__main__": + main() diff --git a/ansible_collections/splunk/es/plugins/modules/correlation_search.py b/ansible_collections/splunk/es/plugins/modules/correlation_search.py new file mode 100644 index 000000000..9c865507b --- /dev/null +++ b/ansible_collections/splunk/es/plugins/modules/correlation_search.py @@ -0,0 +1,376 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# https://github.com/ansible/ansible/issues/65816 +# https://github.com/PyCQA/pylint/issues/214 + +# (c) 2018, Adam Miller (admiller@redhat.com) +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: correlation_search +short_description: Manage Splunk Enterprise Security Correlation Searches +description: + - This module allows for creation, deletion, and modification of Splunk Enterprise Security Correlation Searches +version_added: "1.0.0" +deprecated: + alternative: splunk_correlation_searches + why: Newer and updated modules released with more functionality. + removed_at_date: '2024-09-01' +options: + name: + description: + - Name of coorelation search + required: True + type: str + description: + description: + - Description of the coorelation search, this will populate the description field for the web console + required: True + type: str + state: + description: + - Add, remove, enable, or disiable a correlation search. + required: True + choices: [ "present", "absent", "enabled", "disabled" ] + type: str + search: + description: + - SPL search string + type: str + required: True + app: + description: + - Splunk app to associate the correlation seach with + type: str + required: False + default: "SplunkEnterpriseSecuritySuite" + ui_dispatch_context: + description: + - Set an app to use for links such as the drill-down search in a notable + event or links in an email adaptive response action. 
If None, uses the + Application Context. + type: str + required: False + time_earliest: + description: + - Earliest time using relative time modifiers. + type: str + required: False + default: "-24h" + time_latest: + description: + - Latest time using relative time modifiers. + type: str + required: False + default: "now" + cron_schedule: + description: + - Enter a cron-style schedule. + - For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM). + - Real-time searches use a default schedule of C('*/5 * * * *'). + type: str + required: False + default: "*/5 * * * *" + scheduling: + description: + - Controls the way the scheduler computes the next execution time of a scheduled search. + - > + Learn more: + https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling + type: str + required: False + default: "real-time" + choices: + - "real-time" + - "continuous" + schedule_window: + description: + - Let report run at any time within a window that opens at its scheduled run time, + to improve efficiency when there are many concurrently scheduled reports. + The "auto" setting automatically determines the best window width for the report. + type: str + required: False + default: "0" + schedule_priority: + description: + - Raise the scheduling priority of a report. Set to "Higher" to prioritize + it above other searches of the same scheduling mode, or "Highest" to + prioritize it above other searches regardless of mode. Use with discretion. + type: str + required: False + default: "Default" + choices: + - "Default" + - "Higher" + - "Highest" + trigger_alert_when: + description: + - Raise the scheduling priority of a report. Set to "Higher" to prioritize + it above other searches of the same scheduling mode, or "Highest" to + prioritize it above other searches regardless of mode. Use with discretion. 
+ type: str + required: False + default: "number of events" + choices: + - "number of events" + - "number of results" + - "number of hosts" + - "number of sources" + trigger_alert_when_condition: + description: + - Conditional to pass to C(trigger_alert_when) + type: str + required: False + default: "greater than" + choices: + - "greater than" + - "less than" + - "equal to" + - "not equal to" + - "drops by" + - "rises by" + trigger_alert_when_value: + description: + - Value to pass to C(trigger_alert_when) + type: str + required: False + default: "10" + throttle_window_duration: + description: + - "How much time to ignore other events that match the field values specified in Fields to group by." + type: str + required: False + throttle_fields_to_group_by: + description: + - "Type the fields to consider for matching events for throttling." + type: str + required: False + suppress_alerts: + description: + - "To suppress alerts from this correlation search or not" + type: bool + required: False + default: False +notes: + - > + The following options are not yet supported: + throttle_window_duration, throttle_fields_to_group_by, and adaptive_response_actions + +author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> +""" +# FIXME - adaptive response action association is probaby going to need to be a separate module we stitch together in a role + +EXAMPLES = """ +- name: Example of creating a correlation search with splunk.es.coorelation_search + splunk.es.correlation_search: + name: "Example Coorelation Search From Ansible" + description: "Example Coorelation Search From Ansible, description." 
+ search: 'source="/var/log/snort.log"' + state: "present" +""" + +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_text + +from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus +from ansible.module_utils.six.moves.urllib.error import HTTPError +from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import ( + utils, +) +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, +) + + +def main(): + argspec = dict( + name=dict(required=True, type="str"), + description=dict(required=True, type="str"), + state=dict( + choices=["present", "absent", "enabled", "disabled"], required=True + ), + search=dict(required=True, type="str"), + app=dict( + type="str", required=False, default="SplunkEnterpriseSecuritySuite" + ), + ui_dispatch_context=dict(type="str", required=False), + time_earliest=dict(type="str", required=False, default="-24h"), + time_latest=dict(type="str", required=False, default="now"), + cron_schedule=dict(type="str", required=False, default="*/5 * * * *"), + scheduling=dict( + type="str", + required=False, + default="real-time", + choices=["real-time", "continuous"], + ), + schedule_window=dict(type="str", required=False, default="0"), + schedule_priority=dict( + type="str", + required=False, + default="Default", + choices=["Default", "Higher", "Highest"], + ), + trigger_alert_when=dict( + type="str", + required=False, + default="number of events", + choices=[ + "number of events", + "number of results", + "number of hosts", + "number of sources", + ], + ), + trigger_alert_when_condition=dict( + type="str", + required=False, + default="greater than", + choices=[ + "greater than", + "less than", + "equal to", + "not equal to", + "drops by", + "rises by", + ], + ), + trigger_alert_when_value=dict( + type="str", required=False, default="10" + ), + throttle_window_duration=dict(type="str", required=False), + 
throttle_fields_to_group_by=dict(type="str", required=False), + suppress_alerts=dict(type="bool", required=False, default=False), + ) + + module = AnsibleModule(argument_spec=argspec, supports_check_mode=True) + + if module.params["state"] in ["present", "enabled"]: + module_disabled_state = False + else: + module_disabled_state = True + + splunk_request = SplunkRequest( + module, + override=False, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + not_rest_data_keys=["state"], + ) + + try: + query_dict = splunk_request.get_by_path( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format( + quote_plus(module.params["name"]) + ) + ) + except HTTPError as e: + # the data monitor doesn't exist + query_dict = {} + + # Have to custom craft the data here because they overload the saved searches + # endpoint in the rest api and we want to hide the nuance from the user + request_post_data = {} + request_post_data["name"] = module.params["name"] + request_post_data["action.correlationsearch.enabled"] = "1" + request_post_data["is_scheduled"] = True + request_post_data["dispatch.rt_backfill"] = True + request_post_data["action.correlationsearch.label"] = module.params["name"] + request_post_data["description"] = module.params["description"] + request_post_data["search"] = module.params["search"] + request_post_data["request.ui_dispatch_app"] = module.params["app"] + if module.params["ui_dispatch_context"]: + request_post_data["request.ui_dispatch_context"] = module.params[ + "ui_dispatch_context" + ] + request_post_data["dispatch.earliest_time"] = module.params[ + "time_earliest" + ] + request_post_data["dispatch.latest_time"] = module.params["time_latest"] + request_post_data["cron_schedule"] = module.params["cron_schedule"] + if module.params["scheduling"] == "real-time": + request_post_data["realtime_schedule"] = True + else: + request_post_data["realtime_schedule"] = False + request_post_data["schedule_window"] = 
module.params["schedule_window"] + request_post_data["schedule_priority"] = module.params[ + "schedule_priority" + ].lower() + request_post_data["alert_type"] = module.params["trigger_alert_when"] + request_post_data["alert_comparator"] = module.params[ + "trigger_alert_when_condition" + ] + request_post_data["alert_threshold"] = module.params[ + "trigger_alert_when_value" + ] + request_post_data["alert.suppress"] = module.params["suppress_alerts"] + request_post_data["disabled"] = module_disabled_state + + request_post_data = utils.remove_empties(request_post_data) + + if module.params["state"] in ["present", "enabled", "disabled"]: + if query_dict: + needs_change = False + for arg in request_post_data: + if arg in query_dict["entry"][0]["content"]: + if to_text( + query_dict["entry"][0]["content"][arg] + ) != to_text(request_post_data[arg]): + needs_change = True + if not needs_change: + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + if module.check_mode and needs_change: + module.exit_json( + changed=True, + msg="A change would have been made if not in check mode.", + splunk_data=query_dict, + ) + if needs_change: + # FIXME - need to find a reasonable way to deal with action.correlationsearch.enabled + del request_post_data[ + "name" + ] # If this is present, splunk assumes we're trying to create a new one wit the same name + splunk_data = splunk_request.create_update( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format( + quote_plus(module.params["name"]) + ), + data=urlencode(request_post_data), + ) + module.exit_json( + changed=True, msg="{0} updated.", splunk_data=splunk_data + ) + else: + # Create it + splunk_data = splunk_request.create_update( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches", + data=urlencode(request_post_data), + ) + module.exit_json( + changed=True, msg="{0} created.", splunk_data=splunk_data + ) + + elif module.params["state"] == "absent": + if 
query_dict: + splunk_data = splunk_request.delete_by_path( + "services/saved/searches/{0}".format( + quote_plus(module.params["name"]) + ) + ) + module.exit_json( + changed=True, + msg="Deleted {0}.".format(module.params["name"]), + splunk_data=splunk_data, + ) + + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py b/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py new file mode 100644 index 000000000..0ab756989 --- /dev/null +++ b/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py @@ -0,0 +1,80 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# https://github.com/ansible/ansible/issues/65816 +# https://github.com/PyCQA/pylint/issues/214 + +# (c) 2018, Adam Miller (admiller@redhat.com) +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: correlation_search_info +short_description: Manage Splunk Enterprise Security Correlation Searches +description: + - This module allows for the query of Splunk Enterprise Security Correlation Searches +version_added: "1.0.0" +options: + name: + description: + - Name of coorelation search + required: false + type: str + +author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> +""" +# FIXME - adaptive response action association is probaby going to need to be a separate module we stitch together in a role + +EXAMPLES = """ +- name: Example usage of splunk.es.correlation_search_info + splunk.es.correlation_search_info: + name: "Name of correlation search" + register: scorrelation_search_info + +- name: debug display information gathered + debug: + var: scorrelation_search_info +""" + +from ansible.module_utils.basic import 
AnsibleModule +from ansible.module_utils.six.moves.urllib.parse import quote_plus +from ansible.module_utils.six.moves.urllib.error import HTTPError +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, +) + + +def main(): + + argspec = dict(name=dict(required=False, type="str")) + + module = AnsibleModule(argument_spec=argspec, supports_check_mode=True) + + splunk_request = SplunkRequest( + module, + headers={"Content-Type": "application/json"}, + ) + + if module.params["name"]: + try: + query_dict = splunk_request.get_by_path( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format( + quote_plus(module.params["name"]) + ) + ) + except HTTPError as e: + # the data monitor doesn't exist + query_dict = {} + else: + query_dict = splunk_request.get_by_path( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches" + ) + + module.exit_json(changed=False, splunk_correlation_search_info=query_dict) + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py b/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py new file mode 100644 index 000000000..080d23d3b --- /dev/null +++ b/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py @@ -0,0 +1,264 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# https://github.com/ansible/ansible/issues/65816 +# https://github.com/PyCQA/pylint/issues/214 + +# (c) 2018, Adam Miller (admiller@redhat.com) +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: data_input_monitor +short_description: Manage Splunk Data Inputs of type Monitor +description: + - This module allows for addition or deletion of File and Directory Monitor Data Inputs in Splunk. 
+version_added: "1.0.0" +deprecated: + alternative: splunk_data_inputs_monitor + why: Newer and updated modules released with more functionality. + removed_at_date: '2024-09-01' +options: + name: + description: + - The file or directory path to monitor on the system. + required: True + type: str + state: + description: + - Add or remove a data source. + required: True + choices: + - "present" + - "absent" + type: str + blacklist: + description: + - Specify a regular expression for a file path. The file path that matches this regular expression is not indexed. + required: False + type: str + check_index: + description: + - If set to C(True), the index value is checked to ensure that it is the name of a valid index. + required: False + type: bool + default: False + check_path: + description: + - If set to C(True), the name value is checked to ensure that it exists. + required: False + type: bool + crc_salt: + description: + - A string that modifies the file tracking identity for files in this input. + The magic value <SOURCE> invokes special behavior (see admin documentation). + required: False + type: str + disabled: + description: + - Indicates if input monitoring is disabled. + required: False + default: False + type: bool + followTail: + description: + - If set to C(True), files that are seen for the first time is read from the end. + required: False + type: bool + default: False + host: + description: + - The value to populate in the host field for events from this data input. + required: False + type: str + host_regex: + description: + - Specify a regular expression for a file path. If the path for a file + matches this regular expression, the captured value is used to populate + the host field for events from this data input. The regular expression + must have one capture group. + required: False + type: str + host_segment: + description: + - Use the specified slash-separate segment of the filepath as the host field value. 
+ required: False + type: int + ignore_older_than: + description: + - Specify a time value. If the modification time of a file being monitored + falls outside of this rolling time window, the file is no longer being monitored. + required: False + type: str + index: + description: + - Which index events from this input should be stored in. Defaults to default. + required: False + type: str + recursive: + description: + - Setting this to False prevents monitoring of any subdirectories encountered within this data input. + required: False + type: bool + default: False + rename_source: + description: + - The value to populate in the source field for events from this data input. + The same source should not be used for multiple data inputs. + required: False + type: str + sourcetype: + description: + - The value to populate in the sourcetype field for incoming events. + required: False + type: str + time_before_close: + description: + - When Splunk software reaches the end of a file that is being read, the + file is kept open for a minimum of the number of seconds specified in + this value. After this period has elapsed, the file is checked again for + more data. + required: False + type: int + whitelist: + description: + - Specify a regular expression for a file path. Only file paths that match this regular expression are indexed. 
+ required: False + type: str +author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> +""" + +EXAMPLES = """ +- name: Example adding data input monitor with splunk.es.data_input_monitor + splunk.es.data_input_monitor: + name: "/var/log/example.log" + state: "present" + recursive: True +""" + +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_text +from ansible.module_utils.six.moves.urllib.parse import quote_plus +from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import ( + utils, +) +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, +) + + +def main(): + + argspec = dict( + name=dict(required=True, type="str"), + state=dict(choices=["present", "absent"], required=True), + blacklist=dict(required=False, type="str", default=None), + check_index=dict(required=False, type="bool", default=False), + check_path=dict(required=False, type="bool", default=None), + crc_salt=dict(required=False, type="str", default=None), + disabled=dict(required=False, type="bool", default=False), + followTail=dict(required=False, type="bool", default=False), + host=dict(required=False, type="str", default=None), + host_segment=dict(required=False, type="int", default=None), + host_regex=dict(required=False, type="str", default=None), + ignore_older_than=dict(required=False, type="str", default=None), + index=dict(required=False, type="str", default=None), + recursive=dict(required=False, type="bool", default=False), + rename_source=dict(required=False, type="str", default=None), + sourcetype=dict(required=False, type="str", default=None), + time_before_close=dict(required=False, type="int", default=None), + whitelist=dict(required=False, type="str", default=None), + ) + + module = AnsibleModule(argument_spec=argspec, supports_check_mode=True) + + # map of keys for the splunk REST API that aren't pythonic so we have to + # handle the 
substitutes + keymap = { + "check_index": "check-index", + "check_path": "check-path", + "crc_salt": "crc-salt", + "ignore_older_than": "ignore-older-than", + "rename_source": "rename-source", + "time_before_close": "time-before-close", + } + + splunk_request = SplunkRequest( + module, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + keymap=keymap, + not_rest_data_keys=["state"], + ) + # This is where the splunk_* args are processed + request_data = splunk_request.get_data() + + query_dict = splunk_request.get_by_path( + "servicesNS/nobody/search/data/inputs/monitor/{0}".format( + quote_plus(module.params["name"]) + ) + ) + query_dict = utils.remove_empties(query_dict) + + if module.params["state"] == "present": + if query_dict: + needs_change = False + for arg in request_data: + if arg in query_dict["entry"][0]["content"]: + if to_text( + query_dict["entry"][0]["content"][arg] + ) != to_text(request_data[arg]): + needs_change = True + if not needs_change: + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + if module.check_mode and needs_change: + module.exit_json( + changed=True, + msg="A change would have been made if not in check mode.", + splunk_data=query_dict, + ) + if needs_change: + splunk_data = splunk_request.create_update( + "servicesNS/nobody/search/data/inputs/monitor/{0}".format( + quote_plus(module.params["name"]) + ) + ) + module.exit_json( + changed=True, msg="{0} updated.", splunk_data=splunk_data + ) + else: + # Create it + _data = splunk_request.get_data() + _data["name"] = module.params["name"] + splunk_data = splunk_request.create_update( + "servicesNS/nobody/search/data/inputs/monitor", + data=_data, + ) + module.exit_json( + changed=True, msg="{0} created.", splunk_data=splunk_data + ) + + if module.params["state"] == "absent": + if query_dict: + splunk_data = splunk_request.delete_by_path( + "servicesNS/nobody/search/data/inputs/monitor/{0}".format( + quote_plus(module.params["name"]) + 
) + ) + module.exit_json( + changed=True, + msg="Deleted {0}.".format(module.params["name"]), + splunk_data=splunk_data, + ) + + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/splunk/es/plugins/modules/data_input_network.py b/ansible_collections/splunk/es/plugins/modules/data_input_network.py new file mode 100644 index 000000000..5771eb9cc --- /dev/null +++ b/ansible_collections/splunk/es/plugins/modules/data_input_network.py @@ -0,0 +1,276 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# https://github.com/ansible/ansible/issues/65816 +# https://github.com/PyCQA/pylint/issues/214 + +# (c) 2018, Adam Miller (admiller@redhat.com) +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: data_input_network +short_description: Manage Splunk Data Inputs of type TCP or UDP +description: + - This module allows for addition or deletion of TCP and UDP Data Inputs in Splunk. +version_added: "1.0.0" +deprecated: + alternative: splunk_data_inputs_network + why: Newer and updated modules released with more functionality. + removed_at_date: '2024-09-01' +options: + protocol: + description: + - Choose between tcp or udp + required: True + choices: + - 'tcp' + - 'udp' + type: str + connection_host: + description: + - Set the host for the remote server that is sending data. + - C(ip) sets the host to the IP address of the remote server sending data. + - C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data. + - C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname. 
+ default: "ip" + required: False + type: str + choices: + - "ip" + - "dns" + - "none" + state: + description: + - Enable, disable, create, or destroy + choices: + - "present" + - "absent" + - "enabled" + - "disable" + required: False + default: "present" + type: str + datatype: + description: > + Forwarders can transmit three types of data: raw, unparsed, or parsed. + C(cooked) data refers to parsed and unparsed formats. + choices: + - "cooked" + - "raw" + default: "raw" + required: False + type: str + host: + description: + - Host from which the indexer gets data. + required: False + type: str + index: + description: + - default Index to store generated events. + type: str + name: + description: + - The input port which receives raw data. + required: True + type: str + queue: + description: + - Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue. + - Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more + information about props.conf and rules for timestamping and linebreaking, refer to props.conf and + the online documentation at "Monitor files and directories with inputs.conf" + - Set queue to indexQueue to send your data directly into the index. + choices: + - "parsingQueue" + - "indexQueue" + type: str + required: False + default: "parsingQueue" + rawTcpDoneTimeout: + description: + - Specifies in seconds the timeout value for adding a Done-key. + - If a connection over the port specified by name remains idle after receiving data for specified + number of seconds, it adds a Done-key. This implies the last event is completely received. + default: 10 + type: int + required: False + restrictToHost: + description: + - Allows for restricting this input to only accept data from the host specified here. 
+ required: False + type: str + ssl: + description: + - Enable or disble ssl for the data stream + required: False + type: bool + source: + description: + - Sets the source key/field for events from this input. Defaults to the input file path. + - > + Sets the source key initial value. The key is used during parsing/indexing, in particular to set + the source field during indexing. It is also the source field used at search time. As a convenience, + the chosen string is prepended with 'source::'. + - > + Note: Overriding the source key is generally not recommended. Typically, the input layer provides a + more accurate string to aid in problem analysis and investigation, accurately recording the file from + which the data was retrieved. Consider use of source types, tagging, and search wildcards before + overriding this value. + type: str + sourcetype: + description: + - Set the source type for events from this input. + - '"sourcetype=" is automatically prepended to <string>.' + - Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False). 
+ type: str +author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> +""" + +EXAMPLES = """ +- name: Example adding data input network with splunk.es.data_input_network + splunk.es.data_input_network: + name: "8099" + protocol: "tcp" + state: "present" +""" + + +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_text +from ansible.module_utils.six.moves.urllib.parse import quote_plus +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, +) + + +def main(): + + argspec = dict( + state=dict( + required=False, + choices=["present", "absent", "enabled", "disable"], + default="present", + type="str", + ), + connection_host=dict( + required=False, + choices=["ip", "dns", "none"], + default="ip", + type="str", + ), + host=dict(required=False, type="str", default=None), + index=dict(required=False, type="str", default=None), + name=dict(required=True, type="str"), + protocol=dict(required=True, type="str", choices=["tcp", "udp"]), + queue=dict( + required=False, + type="str", + choices=["parsingQueue", "indexQueue"], + default="parsingQueue", + ), + rawTcpDoneTimeout=dict(required=False, type="int", default=10), + restrictToHost=dict(required=False, type="str", default=None), + ssl=dict(required=False, type="bool", default=None), + source=dict(required=False, type="str", default=None), + sourcetype=dict(required=False, type="str", default=None), + datatype=dict( + required=False, choices=["cooked", "raw"], default="raw" + ), + ) + + module = AnsibleModule(argument_spec=argspec, supports_check_mode=True) + + splunk_request = SplunkRequest( + module, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + not_rest_data_keys=["state", "datatype", "protocol"], + ) + # This is where the splunk_* args are processed + request_data = splunk_request.get_data() + + query_dict = splunk_request.get_by_path( + 
"servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format( + quote_plus(module.params["protocol"]), + quote_plus(module.params["datatype"]), + quote_plus(module.params["name"]), + ) + ) + + if module.params["state"] in ["present", "enabled", "disabled"]: + _data = splunk_request.get_data() + if module.params["state"] in ["present", "enabled"]: + _data["disabled"] = False + else: + _data["disabled"] = True + if query_dict: + needs_change = False + for arg in request_data: + if arg in query_dict["entry"][0]["content"]: + if to_text( + query_dict["entry"][0]["content"][arg] + ) != to_text(request_data[arg]): + needs_change = True + if not needs_change: + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + if module.check_mode and needs_change: + module.exit_json( + changed=True, + msg="A change would have been made if not in check mode.", + splunk_data=query_dict, + ) + if needs_change: + splunk_data = splunk_request.create_update( + "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format( + quote_plus(module.params["protocol"]), + quote_plus(module.params["datatype"]), + quote_plus(module.params["name"]), + ), + data=_data, + ) + if module.params["state"] in ["present", "enabled"]: + module.exit_json( + changed=True, msg="{0} updated.", splunk_data=splunk_data + ) + else: + module.exit_json( + changed=True, msg="{0} disabled.", splunk_data=splunk_data + ) + else: + # Create it + splunk_data = splunk_request.create_update( + "servicesNS/nobody/search/data/inputs/{0}/{1}".format( + quote_plus(module.params["protocol"]), + quote_plus(module.params["datatype"]), + ), + data=_data, + ) + module.exit_json( + changed=True, msg="{0} created.", splunk_data=splunk_data + ) + elif module.params["state"] == "absent": + if query_dict: + splunk_data = splunk_request.delete_by_path( + "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format( + quote_plus(module.params["protocol"]), + quote_plus(module.params["datatype"]), + 
quote_plus(module.params["name"]), + ) + ) + module.exit_json( + changed=True, + msg="Deleted {0}.".format(module.params["name"]), + splunk_data=splunk_data, + ) + + module.exit_json(changed=False, msg="Nothing to do.", splunk_data={}) + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py new file mode 100644 index 000000000..29099424e --- /dev/null +++ b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py @@ -0,0 +1,462 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# https://github.com/ansible/ansible/issues/65816 +# https://github.com/PyCQA/pylint/issues/214 + +# (c) 2018, Adam Miller (admiller@redhat.com) +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: adaptive_response_notable_event +short_description: Manage Splunk Enterprise Security Notable Event Adaptive Responses +description: + - This module allows for creation, deletion, and modification of Splunk + Enterprise Security Notable Event Adaptive Responses that are associated + with a correlation search +version_added: "1.0.0" +deprecated: + alternative: splunk_adaptive_response_notable_events + why: Newer and updated modules released with more functionality. 
+ removed_at_date: '2024-09-01' +options: + name: + description: + - Name of notable event + required: true + type: str + correlation_search_name: + description: + - Name of correlation search to associate this notable event adaptive response with + required: true + type: str + description: + description: + - Description of the notable event, this will populate the description field for the web console + required: true + type: str + state: + description: + - Add or remove a data source. + required: true + choices: [ "present", "absent" ] + type: str + security_domain: + description: + - Splunk Security Domain + type: str + required: False + choices: + - "access" + - "endpoint" + - "network" + - "threat" + - "identity" + - "audit" + default: "threat" + severity: + description: + - Severity rating + type: str + required: False + choices: + - "informational" + - "low" + - "medium" + - "high" + - "critical" + - "unknown" + default: "high" + default_owner: + description: + - Default owner of the notable event, if unset it will default to Splunk System Defaults + type: str + required: False + default_status: + description: + - Default status of the notable event, if unset it will default to Splunk System Defaults + type: str + required: False + choices: + - "unassigned" + - "new" + - "in progress" + - "pending" + - "resolved" + - "closed" + drill_down_name: + description: + - Name for drill down search, Supports variable substitution with fields from the matching event. + type: str + required: False + drill_down_search: + description: + - Drill down search, Supports variable substitution with fields from the matching event. + type: str + required: False + drill_down_earliest_offset: + description: + - Set the amount of time before the triggering event to search for related + events. For example, 2h. 
Use \"$info_min_time$\" to set the drill-down time + to match the earliest time of the search + type: str + required: False + default: \"$info_min_time$\" + drill_down_latest_offset: + description: + - Set the amount of time after the triggering event to search for related + events. For example, 1m. Use \"$info_max_time$\" to set the drill-down + time to match the latest time of the search + type: str + required: False + default: \"$info_max_time$\" + investigation_profiles: + description: + - Investigation profile to assiciate the notable event with. + type: str + required: False + next_steps: + description: + - List of adaptive responses that should be run next + - Describe next steps and response actions that an analyst could take to address this threat. + type: list + elements: str + required: False + recommended_actions: + description: + - List of adaptive responses that are recommended to be run next + - Identifying Recommended Adaptive Responses will highlight those actions + for the analyst when looking at the list of response actions available, + making it easier to find them among the longer list of available actions. 
+ type: list + elements: str + required: False + asset_extraction: + description: + - list of assets to extract, select any one or many of the available choices + - defaults to all available choices + type: list + elements: str + choices: + - src + - dest + - dvc + - orig_host + default: + - src + - dest + - dvc + - orig_host + required: False + identity_extraction: + description: + - list of identity fields to extract, select any one or many of the available choices + - defaults to all available choices + type: list + elements: str + choices: + - user + - src_user + default: + - user + - src_user + required: False + +author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> +""" +# FIXME - adaptive response action association is probaby going to need to be a separate module we stitch together in a role + +EXAMPLES = """ +- name: Example of using splunk.es.adaptive_response_notable_event module + splunk.es.adaptive_response_notable_event: + name: "Example notable event from Ansible" + correlation_search_name: "Example Correlation Search From Ansible" + description: "Example notable event from Ansible, description." 
+ state: "present" + next_steps: + - ping + - nslookup + recommended_actions: + - script + - ansiblesecurityautomation +""" + +import json + +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_text +from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus +from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import ( + utils, +) +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, +) + + +def main(): + + argspec = dict( + name=dict(required=True, type="str"), + correlation_search_name=dict(required=True, type="str"), + description=dict(required=True, type="str"), + state=dict(choices=["present", "absent"], required=True), + security_domain=dict( + choices=[ + "access", + "endpoint", + "network", + "threat", + "identity", + "audit", + ], + required=False, + default="threat", + ), + severity=dict( + choices=[ + "informational", + "low", + "medium", + "high", + "critical", + "unknown", + ], + required=False, + default="high", + ), + default_owner=dict(required=False, type="str"), + default_status=dict( + choices=[ + "unassigned", + "new", + "in progress", + "pending", + "resolved", + "closed", + ], + required=False, + ), + drill_down_name=dict(required=False, type="str"), + drill_down_search=dict(required=False, type="str"), + drill_down_earliest_offset=dict( + required=False, type="str", default="$info_min_time$" + ), + drill_down_latest_offset=dict( + required=False, type="str", default="$info_max_time$" + ), + investigation_profiles=dict(required=False, type="str"), + next_steps=dict( + required=False, type="list", elements="str", default=[] + ), + recommended_actions=dict( + required=False, type="list", elements="str", default=[] + ), + asset_extraction=dict( + required=False, + type="list", + elements="str", + default=["src", "dest", "dvc", "orig_host"], + choices=["src", "dest", "dvc", "orig_host"], + ), + identity_extraction=dict( + 
required=False, + type="list", + elements="str", + default=["user", "src_user"], + choices=["user", "src_user"], + ), + ) + + module = AnsibleModule(argument_spec=argspec, supports_check_mode=True) + + splunk_request = SplunkRequest( + module, + override=False, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + not_rest_data_keys=["state"], + ) + + query_dict = splunk_request.get_by_path( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format( + quote_plus(module.params["correlation_search_name"]) + ) + ) + + # Have to custom craft the data here because they overload the saved searches + # endpoint in the rest api and we want to hide the nuance from the user + request_post_data = {} + + # FIXME need to figure out how to properly support these, the possible values appear to + # be dynamically created based on what the search is indexing + # request_post_data['action.notable.param.extract_assets'] = '[\"src\",\"dest\",\"dvc\",\"orig_host\"]' + # request_post_data['action.notable.param.extract_identities'] = [\"src_user\",\"user\"] + if module.params["next_steps"]: + if len(module.params["next_steps"]) == 1: + next_steps = "[[action|{0}]]".format( + module.params["next_steps"][0] + ) + else: + next_steps = "" + for next_step in module.params["next_steps"]: + if next_steps: + next_steps += "\n[[action|{0}]]".format(next_step) + else: + next_steps = "[[action|{0}]]".format(next_step) + + # NOTE: version:1 appears to be hard coded when you create this via the splunk web UI + # but I don't know what it is/means because there's no docs on it + next_steps_dict = {"version": 1, "data": next_steps} + request_post_data["action.notable.param.next_steps"] = json.dumps( + next_steps_dict + ) + + if module.params["recommended_actions"]: + if len(module.params["recommended_actions"]) == 1: + request_post_data[ + "action.notable.param.recommended_actions" + ] = module.params["recommended_actions"][0] + else: + request_post_data[ + 
"action.notable.param.recommended_actions" + ] = ",".join(module.params["recommended_actions"]) + + request_post_data["action.notable.param.rule_description"] = module.params[ + "description" + ] + request_post_data["action.notable.param.rule_title"] = module.params[ + "name" + ] + request_post_data["action.notable.param.security_domain"] = module.params[ + "security_domain" + ] + request_post_data["action.notable.param.severity"] = module.params[ + "severity" + ] + request_post_data["action.notable.param.asset_extraction"] = module.params[ + "asset_extraction" + ] + request_post_data[ + "action.notable.param.identity_extraction" + ] = module.params["identity_extraction"] + + # NOTE: this field appears to be hard coded when you create this via the splunk web UI + # but I don't know what it is/means because there's no docs on it + request_post_data["action.notable.param.verbose"] = "0" + + if module.params["default_owner"]: + request_post_data[ + "action.notable.param.default_owner" + ] = module.params["default_owner"] + + if module.params["default_status"]: + request_post_data[ + "action.notable.param.default_status" + ] = module.params["default_status"] + + request_post_data = utils.remove_empties(request_post_data) + + if query_dict: + request_post_data["search"] = query_dict["entry"][0]["content"][ + "search" + ] + if "actions" in query_dict["entry"][0]["content"]: + if query_dict["entry"][0]["content"]["actions"] == "notable": + pass + elif ( + len(query_dict["entry"][0]["content"]["actions"].split(",")) + > 0 + and "notable" + not in query_dict["entry"][0]["content"]["actions"] + ): + request_post_data["actions"] = ( + query_dict["entry"][0]["content"]["actions"] + ", notable" + ) + else: + request_post_data["actions"] = "notable" + else: + module.fail_json( + msg="Unable to find correlation search: {0}", + splunk_data=query_dict, + ) + + if module.params["state"] == "present": + needs_change = False + for arg in request_post_data: + if arg in 
query_dict["entry"][0]["content"]: + if to_text(query_dict["entry"][0]["content"][arg]) != to_text( + request_post_data[arg] + ): + needs_change = True + if not needs_change: + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + if module.check_mode and needs_change: + module.exit_json( + changed=True, + msg="A change would have been made if not in check mode.", + splunk_data=query_dict, + ) + if needs_change: + splunk_data = splunk_request.create_update( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format( + quote_plus(module.params["correlation_search_name"]) + ), + data=urlencode(request_post_data), + ) + module.exit_json( + changed=True, + msg="{0} updated.".format( + module.params["correlation_search_name"] + ), + splunk_data=splunk_data, + ) + + if module.params["state"] == "absent": + # FIXME - need to figure out how to clear the action.notable.param fields from the api endpoint + module.exit_json( + changed=True, + msg="Deleted {0}.".format(module.params["name"]), + splunk_data=splunk_data, + ) + for arg in request_post_data: + if arg in query_dict["entry"][0]["content"]: + needs_change = True + del query_dict["entry"][0]["content"][arg] + if not needs_change: + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + if module.check_mode and needs_change: + module.exit_json( + changed=True, + msg="A change would have been made if not in check mode.", + splunk_data=query_dict, + ) + if needs_change: + splunk_data = splunk_request.create_update( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format( + quote_plus(module.params["correlation_search_name"]) + ), + data=urlencode(request_post_data), + ) + module.exit_json( + changed=True, + msg="{0} updated.".format( + module.params["correlation_search_name"] + ), + splunk_data=splunk_data, + ) + + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + + +if __name__ == 
"__main__": + main() diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py new file mode 100644 index 000000000..fa680a511 --- /dev/null +++ b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py @@ -0,0 +1,512 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright 2022 Red Hat +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: splunk_adaptive_response_notable_events +short_description: Manage Adaptive Responses notable events resource module +description: + - This module allows for creation, deletion, and modification of Splunk + Enterprise Security Notable Event Adaptive Responses that are associated + with a correlation search + - Tested against Splunk Enterprise Server 8.2.3 +version_added: "2.1.0" +options: + config: + description: + - Configure file and directory monitoring on the system + type: list + elements: dict + suboptions: + name: + description: + - Name of notable event + type: str + correlation_search_name: + description: + - Name of correlation search to associate this notable event adaptive response with + required: true + type: str + description: + description: + - Description of the notable event, this will populate the description field for the web console + type: str + security_domain: + description: + - Splunk Security Domain + type: str + choices: + - "access" + - "endpoint" + - "network" + - "threat" + - "identity" + - "audit" + default: "threat" + severity: + description: + - Severity rating + type: str + choices: + - "informational" + - "low" + - "medium" + - "high" + - "critical" + - "unknown" + default: "high" + default_owner: + description: + - Default owner of the notable event, if unset 
it will default to Splunk System Defaults + type: str + default_status: + description: + - Default status of the notable event, if unset it will default to Splunk System Defaults + type: str + choices: + - "unassigned" + - "new" + - "in progress" + - "pending" + - "resolved" + - "closed" + drilldown_name: + description: + - Name for drill down search, Supports variable substitution with fields from the matching event. + type: str + drilldown_search: + description: + - Drill down search, Supports variable substitution with fields from the matching event. + type: str + drilldown_earliest_offset: + description: + - Set the amount of time before the triggering event to search for related + events. For example, 2h. Use '$info_min_time$' to set the drill-down time + to match the earliest time of the search + type: str + default: '$info_min_time$' + drilldown_latest_offset: + description: + - Set the amount of time after the triggering event to search for related + events. For example, 1m. Use '$info_max_time$' to set the drill-down + time to match the latest time of the search + type: str + default: '$info_max_time$' + investigation_profiles: + description: + - Investigation profile to associate the notable event with. + type: list + elements: str + next_steps: + description: + - List of adaptive responses that should be run next + - Describe next steps and response actions that an analyst could take to address this threat. + type: list + elements: str + recommended_actions: + description: + - List of adaptive responses that are recommended to be run next + - Identifying Recommended Adaptive Responses will highlight those actions + for the analyst when looking at the list of response actions available, + making it easier to find them among the longer list of available actions. 
+ type: list + elements: str + extract_artifacts: + description: + - Assets and identities to be extracted + type: dict + suboptions: + asset: + description: + - list of assets to extract, select any one or many of the available choices + - defaults to all available choices + type: list + elements: str + choices: + - src + - dest + - dvc + - orig_host + file: + description: + - list of files to extract + type: list + elements: str + identity: + description: + - list of identity fields to extract, select any one or many of the available choices + - defaults to 'user' and 'src_user' + type: list + elements: str + choices: + - user + - src_user + - src_user_id + - user_id + - src_user_role + - user_role + - vendor_account + url: + description: + - list of URLs to extract + type: list + elements: str + running_config: + description: + - The module, by default, will connect to the remote device and retrieve the current + running-config to use as a base for comparing against the contents of source. + There are times when it is not desirable to have the task get the current running-config + for every task in a playbook. The I(running_config) argument allows the implementer + to pass in the configuration to use as the base config for comparison. This + value of this option should be the output received from device by executing + command. 
+ type: str + state: + description: + - The state the configuration should be left in + type: str + choices: + - merged + - replaced + - deleted + - gathered + default: merged + +author: Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security> +""" + +EXAMPLES = """ +# Using gathered +# -------------- + +- name: Gather adaptive response notable events config + splunk.es.splunk_adaptive_response_notable_events: + config: + - correlation_search_name: Ansible Test + - correlation_search_name: Ansible Test 2 + state: gathered + +# RUN output: +# ----------- + +# "gathered": [ +# { +# "correlation_search_name": "Ansible Test", +# "description": "test notable event", +# "drilldown_earliest_offset": "$info_min_time$", +# "drilldown_latest_offset": "$info_max_time$", +# "drilldown_name": "test_drill_name", +# "drilldown_search": "test_drill", +# "extract_artifacts": { +# "asset": [ +# "src", +# "dest", +# "dvc", +# "orig_host" +# ], +# "identity": [ +# "src_user", +# "user", +# "src_user_id", +# "src_user_role", +# "user_id", +# "user_role", +# "vendor_account" +# ] +# }, +# "investigation_profiles": [ +# "test profile 1", +# "test profile 2", +# "test profile 3" +# ], +# "next_steps": [ +# "makestreams", +# "nbtstat", +# "nslookup" +# ], +# "name": "ansible_test_notable", +# "recommended_actions": [ +# "email", +# "logevent", +# "makestreams", +# "nbtstat" +# ], +# "security_domain": "threat", +# "severity": "high" +# }, +# { } # there is no configuration associated with "/var" +# ] + +# Using merged +# ------------ + +- name: Example to add config + splunk.es.splunk_adaptive_response_notable_events: + config: + - correlation_search_name: Ansible Test + description: test notable event + drilldown_earliest_offset: $info_min_time$ + drilldown_latest_offset: $info_max_time$ + extract_artifacts: + asset: + - src + - dest + identity: + - src_user + - user + - src_user_id + next_steps: + - makestreams + name: ansible_test_notable + 
recommended_actions: + - email + - logevent + security_domain: threat + severity: high + state: merged + +# RUN output: +# ----------- + +# "after": [ +# { +# "correlation_search_name": "Ansible Test", +# "description": "test notable event", +# "drilldown_earliest_offset": "$info_min_time$", +# "drilldown_latest_offset": "$info_max_time$", +# "drilldown_name": "test_drill_name", +# "drilldown_search": "test_drill", +# "extract_artifacts": { +# "asset": [ +# "src", +# "dest", +# "dvc", +# "orig_host" +# ], +# "identity": [ +# "src_user", +# "user", +# "src_user_id", +# "src_user_role", +# "user_id", +# "user_role", +# "vendor_account" +# ] +# }, +# "investigation_profiles": [ +# "test profile 1", +# "test profile 2", +# "test profile 3" +# ], +# "next_steps": [ +# "makestreams", +# "nbtstat", +# "nslookup" +# ], +# "name": "ansible_test_notable", +# "recommended_actions": [ +# "email", +# "logevent", +# "makestreams", +# "nbtstat" +# ], +# "security_domain": "threat", +# "severity": "high" +# } +# ], +# "before": [], + +# Using replaced +# -------------- + +- name: Example to Replace the config + splunk.es.splunk_adaptive_response_notable_events: + config: + - correlation_search_name: Ansible Test + description: test notable event + drilldown_earliest_offset: $info_min_time$ + drilldown_latest_offset: $info_max_time$ + extract_artifacts: + asset: + - src + - dest + identity: + - src_user + - user + - src_user_id + next_steps: + - makestreams + name: ansible_test_notable + recommended_actions: + - email + - logevent + security_domain: threat + severity: high + state: replaced + +# RUN output: +# ----------- + +# "after": [ +# { +# "correlation_search_name": "Ansible Test", +# "description": "test notable event", +# "drilldown_earliest_offset": "$info_min_time$", +# "drilldown_latest_offset": "$info_max_time$", +# "extract_artifacts": { +# "asset": [ +# "src", +# "dest" +# ], +# "identity": [ +# "src_user", +# "user", +# "src_user_id" +# ] +# }, +# "next_steps": [ +# 
"makestreams" +# ], +# "name": "ansible_test_notable", +# "recommended_actions": [ +# "email", +# "logevent" +# ], +# "security_domain": "threat", +# "severity": "high" +# } +# ], +# "before": [ +# { +# "correlation_search_name": "Ansible Test", +# "description": "test notable event", +# "drilldown_earliest_offset": "$info_min_time$", +# "drilldown_latest_offset": "$info_max_time$", +# "drilldown_name": "test_drill_name", +# "drilldown_search": "test_drill", +# "extract_artifacts": { +# "asset": [ +# "src", +# "dest", +# "dvc", +# "orig_host" +# ], +# "identity": [ +# "src_user", +# "user", +# "src_user_id", +# "src_user_role", +# "user_id", +# "user_role", +# "vendor_account" +# ] +# }, +# "investigation_profiles": [ +# "test profile 1", +# "test profile 2", +# "test profile 3" +# ], +# "next_steps": [ +# "makestreams", +# "nbtstat", +# "nslookup" +# ], +# "name": "ansible_test_notable", +# "recommended_actions": [ +# "email", +# "logevent", +# "makestreams", +# "nbtstat" +# ], +# "security_domain": "threat", +# "severity": "high" +# } +# ], + +# USING DELETED +# ------------- + +- name: Example to remove the config + splunk.es.splunk_adaptive_response_notable_events: + config: + - correlation_search_name: Ansible Test + state: deleted + +# RUN output: +# ----------- + +# "after": [], +# "before": [ +# { +# "correlation_search_name": "Ansible Test", +# "description": "test notable event", +# "drilldown_earliest_offset": "$info_min_time$", +# "drilldown_latest_offset": "$info_max_time$", +# "drilldown_name": "test_drill_name", +# "drilldown_search": "test_drill", +# "extract_artifacts": { +# "asset": [ +# "src", +# "dest", +# "dvc", +# "orig_host" +# ], +# "identity": [ +# "src_user", +# "user", +# "src_user_id", +# "src_user_role", +# "user_id", +# "user_role", +# "vendor_account" +# ] +# }, +# "investigation_profiles": [ +# "test profile 1", +# "test profile 2", +# "test profile 3" +# ], +# "next_steps": [ +# "makestreams", +# "nbtstat", +# "nslookup" +# ], +# 
"name": "ansible_test_notable", +# "recommended_actions": [ +# "email", +# "logevent", +# "makestreams", +# "nbtstat" +# ], +# "security_domain": "threat", +# "severity": "high" +# } +# ] +""" + +RETURN = """ +before: + description: The configuration as structured data prior to module invocation. + returned: always + type: list + sample: The configuration returned will always be in the same format of the parameters above. +after: + description: The configuration as structured data after module completion. + returned: when changed + type: list + sample: The configuration returned will always be in the same format of the parameters above. +gathered: + description: Facts about the network resource gathered from the remote device as structured data. + returned: when state is I(gathered) + type: dict + sample: > + This output will always be in the same format as the + module argspec. +""" diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py new file mode 100644 index 000000000..9c865507b --- /dev/null +++ b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py @@ -0,0 +1,376 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# https://github.com/ansible/ansible/issues/65816 +# https://github.com/PyCQA/pylint/issues/214 + +# (c) 2018, Adam Miller (admiller@redhat.com) +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: correlation_search +short_description: Manage Splunk Enterprise Security Correlation Searches +description: + - This module allows for creation, deletion, and modification of Splunk Enterprise Security Correlation Searches +version_added: "1.0.0" +deprecated: + alternative: splunk_correlation_searches + why: Newer and updated modules released with more 
functionality. + removed_at_date: '2024-09-01' +options: + name: + description: + - Name of coorelation search + required: True + type: str + description: + description: + - Description of the coorelation search, this will populate the description field for the web console + required: True + type: str + state: + description: + - Add, remove, enable, or disiable a correlation search. + required: True + choices: [ "present", "absent", "enabled", "disabled" ] + type: str + search: + description: + - SPL search string + type: str + required: True + app: + description: + - Splunk app to associate the correlation seach with + type: str + required: False + default: "SplunkEnterpriseSecuritySuite" + ui_dispatch_context: + description: + - Set an app to use for links such as the drill-down search in a notable + event or links in an email adaptive response action. If None, uses the + Application Context. + type: str + required: False + time_earliest: + description: + - Earliest time using relative time modifiers. + type: str + required: False + default: "-24h" + time_latest: + description: + - Latest time using relative time modifiers. + type: str + required: False + default: "now" + cron_schedule: + description: + - Enter a cron-style schedule. + - For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM). + - Real-time searches use a default schedule of C('*/5 * * * *'). + type: str + required: False + default: "*/5 * * * *" + scheduling: + description: + - Controls the way the scheduler computes the next execution time of a scheduled search. 
+ - > + Learn more: + https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling + type: str + required: False + default: "real-time" + choices: + - "real-time" + - "continuous" + schedule_window: + description: + - Let report run at any time within a window that opens at its scheduled run time, + to improve efficiency when there are many concurrently scheduled reports. + The "auto" setting automatically determines the best window width for the report. + type: str + required: False + default: "0" + schedule_priority: + description: + - Raise the scheduling priority of a report. Set to "Higher" to prioritize + it above other searches of the same scheduling mode, or "Highest" to + prioritize it above other searches regardless of mode. Use with discretion. + type: str + required: False + default: "Default" + choices: + - "Default" + - "Higher" + - "Highest" + trigger_alert_when: + description: + - Raise the scheduling priority of a report. Set to "Higher" to prioritize + it above other searches of the same scheduling mode, or "Highest" to + prioritize it above other searches regardless of mode. Use with discretion. + type: str + required: False + default: "number of events" + choices: + - "number of events" + - "number of results" + - "number of hosts" + - "number of sources" + trigger_alert_when_condition: + description: + - Conditional to pass to C(trigger_alert_when) + type: str + required: False + default: "greater than" + choices: + - "greater than" + - "less than" + - "equal to" + - "not equal to" + - "drops by" + - "rises by" + trigger_alert_when_value: + description: + - Value to pass to C(trigger_alert_when) + type: str + required: False + default: "10" + throttle_window_duration: + description: + - "How much time to ignore other events that match the field values specified in Fields to group by." 
+ type: str + required: False + throttle_fields_to_group_by: + description: + - "Type the fields to consider for matching events for throttling." + type: str + required: False + suppress_alerts: + description: + - "To suppress alerts from this correlation search or not" + type: bool + required: False + default: False +notes: + - > + The following options are not yet supported: + throttle_window_duration, throttle_fields_to_group_by, and adaptive_response_actions + +author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> +""" +# FIXME - adaptive response action association is probaby going to need to be a separate module we stitch together in a role + +EXAMPLES = """ +- name: Example of creating a correlation search with splunk.es.coorelation_search + splunk.es.correlation_search: + name: "Example Coorelation Search From Ansible" + description: "Example Coorelation Search From Ansible, description." + search: 'source="/var/log/snort.log"' + state: "present" +""" + +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_text + +from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus +from ansible.module_utils.six.moves.urllib.error import HTTPError +from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import ( + utils, +) +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, +) + + +def main(): + argspec = dict( + name=dict(required=True, type="str"), + description=dict(required=True, type="str"), + state=dict( + choices=["present", "absent", "enabled", "disabled"], required=True + ), + search=dict(required=True, type="str"), + app=dict( + type="str", required=False, default="SplunkEnterpriseSecuritySuite" + ), + ui_dispatch_context=dict(type="str", required=False), + time_earliest=dict(type="str", required=False, default="-24h"), + time_latest=dict(type="str", required=False, default="now"), + 
cron_schedule=dict(type="str", required=False, default="*/5 * * * *"), + scheduling=dict( + type="str", + required=False, + default="real-time", + choices=["real-time", "continuous"], + ), + schedule_window=dict(type="str", required=False, default="0"), + schedule_priority=dict( + type="str", + required=False, + default="Default", + choices=["Default", "Higher", "Highest"], + ), + trigger_alert_when=dict( + type="str", + required=False, + default="number of events", + choices=[ + "number of events", + "number of results", + "number of hosts", + "number of sources", + ], + ), + trigger_alert_when_condition=dict( + type="str", + required=False, + default="greater than", + choices=[ + "greater than", + "less than", + "equal to", + "not equal to", + "drops by", + "rises by", + ], + ), + trigger_alert_when_value=dict( + type="str", required=False, default="10" + ), + throttle_window_duration=dict(type="str", required=False), + throttle_fields_to_group_by=dict(type="str", required=False), + suppress_alerts=dict(type="bool", required=False, default=False), + ) + + module = AnsibleModule(argument_spec=argspec, supports_check_mode=True) + + if module.params["state"] in ["present", "enabled"]: + module_disabled_state = False + else: + module_disabled_state = True + + splunk_request = SplunkRequest( + module, + override=False, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + not_rest_data_keys=["state"], + ) + + try: + query_dict = splunk_request.get_by_path( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format( + quote_plus(module.params["name"]) + ) + ) + except HTTPError as e: + # the data monitor doesn't exist + query_dict = {} + + # Have to custom craft the data here because they overload the saved searches + # endpoint in the rest api and we want to hide the nuance from the user + request_post_data = {} + request_post_data["name"] = module.params["name"] + request_post_data["action.correlationsearch.enabled"] = "1" + 
request_post_data["is_scheduled"] = True + request_post_data["dispatch.rt_backfill"] = True + request_post_data["action.correlationsearch.label"] = module.params["name"] + request_post_data["description"] = module.params["description"] + request_post_data["search"] = module.params["search"] + request_post_data["request.ui_dispatch_app"] = module.params["app"] + if module.params["ui_dispatch_context"]: + request_post_data["request.ui_dispatch_context"] = module.params[ + "ui_dispatch_context" + ] + request_post_data["dispatch.earliest_time"] = module.params[ + "time_earliest" + ] + request_post_data["dispatch.latest_time"] = module.params["time_latest"] + request_post_data["cron_schedule"] = module.params["cron_schedule"] + if module.params["scheduling"] == "real-time": + request_post_data["realtime_schedule"] = True + else: + request_post_data["realtime_schedule"] = False + request_post_data["schedule_window"] = module.params["schedule_window"] + request_post_data["schedule_priority"] = module.params[ + "schedule_priority" + ].lower() + request_post_data["alert_type"] = module.params["trigger_alert_when"] + request_post_data["alert_comparator"] = module.params[ + "trigger_alert_when_condition" + ] + request_post_data["alert_threshold"] = module.params[ + "trigger_alert_when_value" + ] + request_post_data["alert.suppress"] = module.params["suppress_alerts"] + request_post_data["disabled"] = module_disabled_state + + request_post_data = utils.remove_empties(request_post_data) + + if module.params["state"] in ["present", "enabled", "disabled"]: + if query_dict: + needs_change = False + for arg in request_post_data: + if arg in query_dict["entry"][0]["content"]: + if to_text( + query_dict["entry"][0]["content"][arg] + ) != to_text(request_post_data[arg]): + needs_change = True + if not needs_change: + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + if module.check_mode and needs_change: + module.exit_json( + changed=True, + msg="A 
change would have been made if not in check mode.", + splunk_data=query_dict, + ) + if needs_change: + # FIXME - need to find a reasonable way to deal with action.correlationsearch.enabled + del request_post_data[ + "name" + ] # If this is present, splunk assumes we're trying to create a new one wit the same name + splunk_data = splunk_request.create_update( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format( + quote_plus(module.params["name"]) + ), + data=urlencode(request_post_data), + ) + module.exit_json( + changed=True, msg="{0} updated.", splunk_data=splunk_data + ) + else: + # Create it + splunk_data = splunk_request.create_update( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches", + data=urlencode(request_post_data), + ) + module.exit_json( + changed=True, msg="{0} created.", splunk_data=splunk_data + ) + + elif module.params["state"] == "absent": + if query_dict: + splunk_data = splunk_request.delete_by_path( + "services/saved/searches/{0}".format( + quote_plus(module.params["name"]) + ) + ) + module.exit_json( + changed=True, + msg="Deleted {0}.".format(module.params["name"]), + splunk_data=splunk_data, + ) + + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py new file mode 100644 index 000000000..0ab756989 --- /dev/null +++ b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py @@ -0,0 +1,80 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# https://github.com/ansible/ansible/issues/65816 +# https://github.com/PyCQA/pylint/issues/214 + +# (c) 2018, Adam Miller (admiller@redhat.com) +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function 
+ +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: correlation_search_info +short_description: Manage Splunk Enterprise Security Correlation Searches +description: + - This module allows for the query of Splunk Enterprise Security Correlation Searches +version_added: "1.0.0" +options: + name: + description: + - Name of coorelation search + required: false + type: str + +author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> +""" +# FIXME - adaptive response action association is probaby going to need to be a separate module we stitch together in a role + +EXAMPLES = """ +- name: Example usage of splunk.es.correlation_search_info + splunk.es.correlation_search_info: + name: "Name of correlation search" + register: scorrelation_search_info + +- name: debug display information gathered + debug: + var: scorrelation_search_info +""" + +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.six.moves.urllib.parse import quote_plus +from ansible.module_utils.six.moves.urllib.error import HTTPError +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, +) + + +def main(): + + argspec = dict(name=dict(required=False, type="str")) + + module = AnsibleModule(argument_spec=argspec, supports_check_mode=True) + + splunk_request = SplunkRequest( + module, + headers={"Content-Type": "application/json"}, + ) + + if module.params["name"]: + try: + query_dict = splunk_request.get_by_path( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format( + quote_plus(module.params["name"]) + ) + ) + except HTTPError as e: + # the data monitor doesn't exist + query_dict = {} + else: + query_dict = splunk_request.get_by_path( + "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches" + ) + + module.exit_json(changed=False, splunk_correlation_search_info=query_dict) + + +if __name__ == "__main__": + main() diff --git 
a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py new file mode 100644 index 000000000..ac834d1b9 --- /dev/null +++ b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py @@ -0,0 +1,630 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright 2022 Red Hat +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: splunk_correlation_searches +short_description: Splunk Enterprise Security Correlation searches resource module +description: + - This module allows for creation, deletion, and modification of Splunk + Enterprise Security correlation searches + - Tested against Splunk Enterprise Server v8.2.3 with Splunk Enterprise Security v7.0.1 + installed on it. +version_added: "2.1.0" +options: + config: + description: + - Configure file and directory monitoring on the system + type: list + elements: dict + suboptions: + name: + description: + - Name of correlation search + type: str + required: True + disabled: + description: + - Disable correlation search + type: bool + default: False + description: + description: + - Description of the coorelation search, this will populate the description field for the web console + type: str + search: + description: + - SPL search string + type: str + app: + description: + - Splunk app to associate the correlation seach with + type: str + default: "SplunkEnterpriseSecuritySuite" + annotations: + description: + - Add context from industry standard cyber security mappings in Splunk Enterprise Security + or custom annotations + type: dict + suboptions: + cis20: + description: + - Specify CIS20 annotations + type: list + elements: str + kill_chain_phases: + description: + - Specify Kill 10 annotations + type: list + elements: str + 
mitre_attack: + description: + - Specify MITRE ATTACK annotations + type: list + elements: str + nist: + description: + - Specify NIST annotations + type: list + elements: str + custom: + description: + - Specify custom framework and custom annotations + type: list + elements: dict + suboptions: + framework: + description: + - Specify annotation framework + type: str + custom_annotations: + description: + - Specify annotations associated with custom framework + type: list + elements: str + ui_dispatch_context: + description: + - Set an app to use for links such as the drill-down search in a notable + event or links in an email adaptive response action. If None, uses the + Application Context. + type: str + time_earliest: + description: + - Earliest time using relative time modifiers. + type: str + default: "-24h" + time_latest: + description: + - Latest time using relative time modifiers. + type: str + default: "now" + cron_schedule: + description: + - Enter a cron-style schedule. + - For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM). + - Real-time searches use a default schedule of C('*/5 * * * *'). + type: str + default: "*/5 * * * *" + scheduling: + description: + - Controls the way the scheduler computes the next execution time of a scheduled search. + - > + Learn more: + https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling + type: str + default: "realtime" + choices: + - "realtime" + - "continuous" + schedule_window: + description: + - Let report run at any time within a window that opens at its scheduled run time, + to improve efficiency when there are many concurrently scheduled reports. + The "auto" setting automatically determines the best window width for the report. + type: str + default: "0" + schedule_priority: + description: + - Raise the scheduling priority of a report. 
Set to "Higher" to prioritize + it above other searches of the same scheduling mode, or "Highest" to + prioritize it above other searches regardless of mode. Use with discretion. + type: str + default: "default" + choices: + - "default" + - "higher" + - "highest" + trigger_alert: + description: + - Notable response actions and risk response actions are always triggered for each result. + Choose whether the trigger is activated once or for each result. + type: str + default: "once" + choices: + - "once" + - "for each result" + trigger_alert_when: + description: + - Raise the scheduling priority of a report. Set to "Higher" to prioritize + it above other searches of the same scheduling mode, or "Highest" to + prioritize it above other searches regardless of mode. Use with discretion. + type: str + default: "number of events" + choices: + - "number of events" + - "number of results" + - "number of hosts" + - "number of sources" + trigger_alert_when_condition: + description: + - Conditional to pass to C(trigger_alert_when) + type: str + default: "greater than" + choices: + - "greater than" + - "less than" + - "equal to" + - "not equal to" + - "drops by" + - "rises by" + trigger_alert_when_value: + description: + - Value to pass to C(trigger_alert_when) + type: str + default: "10" + throttle_window_duration: + description: + - How much time to ignore other events that match the field values specified in Fields to group by. + type: str + throttle_fields_to_group_by: + description: + - Type the fields to consider for matching events for throttling. + type: list + elements: str + suppress_alerts: + description: + - To suppress alerts from this correlation search or not + type: bool + default: False + running_config: + description: + - The module, by default, will connect to the remote device and retrieve the current + running-config to use as a base for comparing against the contents of source. 
+ There are times when it is not desirable to have the task get the current running-config + for every task in a playbook. The I(running_config) argument allows the implementer + to pass in the configuration to use as the base config for comparison. This + value of this option should be the output received from device by executing + command. + type: str + state: + description: + - The state the configuration should be left in + type: str + choices: + - merged + - replaced + - deleted + - gathered + default: merged + +author: Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security> +""" + +EXAMPLES = """ +# Using gathered +# -------------- + +- name: Gather correlation searches config + splunk.es.splunk_correlation_searches: + config: + - name: Ansible Test + - name: Ansible Test 2 + state: gathered + +# RUN output: +# ----------- + +# "gathered": [ +# { +# "annotations": { +# "cis20": [ +# "test1" +# ], +# "custom": [ +# { +# "custom_annotations": [ +# "test5" +# ], +# "framework": "test_framework" +# } +# ], +# "kill_chain_phases": [ +# "test3" +# ], +# "mitre_attack": [ +# "test2" +# ], +# "nist": [ +# "test4" +# ] +# }, +# "app": "DA-ESS-EndpointProtection", +# "cron_schedule": "*/5 * * * *", +# "description": "test description", +# "disabled": false, +# "name": "Ansible Test", +# "schedule_priority": "default", +# "schedule_window": "0", +# "scheduling": "realtime", +# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent' +# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai' +# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio' +# 'n.src\" as \"src\" | where \"count\">=6', +# "suppress_alerts": false, +# "throttle_fields_to_group_by": [ +# "test_field1" +# ], +# "throttle_window_duration": 
"5s", +# "time_earliest": "-24h", +# "time_latest": "now", +# "trigger_alert": "once", +# "trigger_alert_when": "number of events", +# "trigger_alert_when_condition": "greater than", +# "trigger_alert_when_value": "10", +# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite" +# } +# ] + +# Using merged +# ------------ + +- name: Merge and create new correlation searches configuration + splunk.es.splunk_correlation_searches: + config: + - name: Ansible Test + disabled: false + description: test description + app: DA-ESS-EndpointProtection + annotations: + cis20: + - test1 + mitre_attack: + - test2 + kill_chain_phases: + - test3 + nist: + - test4 + custom: + - framework: test_framework + custom_annotations: + - test5 + ui_dispatch_context: SplunkEnterpriseSecuritySuite + time_earliest: -24h + time_latest: now + cron_schedule: "*/5 * * * *" + scheduling: realtime + schedule_window: "0" + schedule_priority: default + trigger_alert: once + trigger_alert_when: number of events + trigger_alert_when_condition: greater than + trigger_alert_when_value: "10" + throttle_window_duration: 5s + throttle_fields_to_group_by: + - test_field1 + suppress_alerts: False + search: > + '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent' + 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai' + 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio' + 'n.src\" as \"src\" | where \"count\">=6' + state: merged + +# RUN output: +# ----------- + +# "after": [ +# { +# "annotations": { +# "cis20": [ +# "test1" +# ], +# "custom": [ +# { +# "custom_annotations": [ +# "test5" +# ], +# "framework": "test_framework" +# } +# ], +# "kill_chain_phases": [ +# "test3" +# ], +# "mitre_attack": [ +# "test2" +# ], +# "nist": [ +# "test4" +# ] +# }, +# "app": 
"DA-ESS-EndpointProtection", +# "cron_schedule": "*/5 * * * *", +# "description": "test description", +# "disabled": false, +# "name": "Ansible Test", +# "schedule_priority": "default", +# "schedule_window": "0", +# "scheduling": "realtime", +# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent' +# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai' +# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio' +# 'n.src\" as \"src\" | where \"count\">=6', +# "suppress_alerts": false, +# "throttle_fields_to_group_by": [ +# "test_field1" +# ], +# "throttle_window_duration": "5s", +# "time_earliest": "-24h", +# "time_latest": "now", +# "trigger_alert": "once", +# "trigger_alert_when": "number of events", +# "trigger_alert_when_condition": "greater than", +# "trigger_alert_when_value": "10", +# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite" +# }, +# ], +# "before": [], + +# Using replaced +# -------------- + +- name: Replace existing correlation searches configuration + splunk.es.splunk_correlation_searches: + state: replaced + config: + - name: Ansible Test + disabled: false + description: test description + app: SplunkEnterpriseSecuritySuite + annotations: + cis20: + - test1 + - test2 + mitre_attack: + - test3 + - test4 + kill_chain_phases: + - test5 + - test6 + nist: + - test7 + - test8 + custom: + - framework: test_framework2 + custom_annotations: + - test9 + - test10 + ui_dispatch_context: SplunkEnterpriseSecuritySuite + time_earliest: -24h + time_latest: now + cron_schedule: "*/5 * * * *" + scheduling: continuous + schedule_window: auto + schedule_priority: default + trigger_alert: once + trigger_alert_when: number of events + trigger_alert_when_condition: greater than + trigger_alert_when_value: 10 + 
throttle_window_duration: 5s + throttle_fields_to_group_by: + - test_field1 + - test_field2 + suppress_alerts: True + search: > + '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent' + 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai' + 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio' + 'n.src\" as \"src\" | where \"count\">=6' + +# RUN output: +# ----------- + +# "after": [ +# { +# "annotations": { +# "cis20": [ +# "test1", +# "test2" +# ], +# "custom": [ +# { +# "custom_annotations": [ +# "test9", +# "test10" +# ], +# "framework": "test_framework2" +# } +# ], +# "kill_chain_phases": [ +# "test5", +# "test6" +# ], +# "mitre_attack": [ +# "test3", +# "test4" +# ], +# "nist": [ +# "test7", +# "test8" +# ] +# }, +# "app": "SplunkEnterpriseSecuritySuite", +# "cron_schedule": "*/5 * * * *", +# "description": "test description", +# "disabled": false, +# "name": "Ansible Test", +# "schedule_priority": "default", +# "schedule_window": "auto", +# "scheduling": "continuous", +# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent' +# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai' +# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio' +# 'n.src\" as \"src\" | where \"count\">=6', +# "suppress_alerts": true, +# "throttle_fields_to_group_by": [ +# "test_field1", +# "test_field2" +# ], +# "throttle_window_duration": "5s", +# "time_earliest": "-24h", +# "time_latest": "now", +# "trigger_alert": "once", +# "trigger_alert_when": "number of events", +# "trigger_alert_when_condition": "greater than", 
+# "trigger_alert_when_value": "10", +# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite" +# } +# ], +# "before": [ +# { +# "annotations": { +# "cis20": [ +# "test1" +# ], +# "custom": [ +# { +# "custom_annotations": [ +# "test5" +# ], +# "framework": "test_framework" +# } +# ], +# "kill_chain_phases": [ +# "test3" +# ], +# "mitre_attack": [ +# "test2" +# ], +# "nist": [ +# "test4" +# ] +# }, +# "app": "DA-ESS-EndpointProtection", +# "cron_schedule": "*/5 * * * *", +# "description": "test description", +# "disabled": false, +# "name": "Ansible Test", +# "schedule_priority": "default", +# "schedule_window": "0", +# "scheduling": "realtime", +# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent' +# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai' +# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio' +# 'n.src\" as \"src\" | where \"count\">=6', +# "suppress_alerts": false, +# "throttle_fields_to_group_by": [ +# "test_field1" +# ], +# "throttle_window_duration": "5s", +# "time_earliest": "-24h", +# "time_latest": "now", +# "trigger_alert": "once", +# "trigger_alert_when": "number of events", +# "trigger_alert_when_condition": "greater than", +# "trigger_alert_when_value": "10", +# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite" +# } +# ] + +# Using deleted +# ------------- + +- name: Example to delete the corelation search + splunk.es.splunk_correlation_searches: + config: + - name: Ansible Test + state: deleted + +# RUN output: +# ----------- + +# "after": [], +# "before": [ +# { +# "annotations": { +# "cis20": [ +# "test1" +# ], +# "custom": [ +# { +# "custom_annotations": [ +# "test5" +# ], +# "framework": "test_framework" +# } +# ], +# "kill_chain_phases": [ +# "test3" +# ], +# "mitre_attack": [ +# "test2" 
+# ], +# "nist": [ +# "test4" +# ] +# }, +# "app": "DA-ESS-EndpointProtection", +# "cron_schedule": "*/5 * * * *", +# "description": "test description", +# "disabled": false, +# "name": "Ansible Test", +# "schedule_priority": "default", +# "schedule_window": "0", +# "scheduling": "realtime", +# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent' +# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai' +# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio' +# 'n.src\" as \"src\" | where \"count\">=6', +# "suppress_alerts": false, +# "throttle_fields_to_group_by": [ +# "test_field1" +# ], +# "throttle_window_duration": "5s", +# "time_earliest": "-24h", +# "time_latest": "now", +# "trigger_alert": "once", +# "trigger_alert_when": "number of events", +# "trigger_alert_when_condition": "greater than", +# "trigger_alert_when_value": "10", +# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite" +# }, +# ], + +""" + +RETURN = """ +before: + description: The configuration as structured data prior to module invocation. + returned: always + type: list + sample: The configuration returned will always be in the same format of the parameters above. +after: + description: The configuration as structured data after module completion. + returned: when changed + type: list + sample: The configuration returned will always be in the same format of the parameters above. +gathered: + description: Facts about the network resource gathered from the remote device as structured data. + returned: when state is I(gathered) + type: dict + sample: > + This output will always be in the same format as the + module argspec. 
+""" diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py new file mode 100644 index 000000000..080d23d3b --- /dev/null +++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py @@ -0,0 +1,264 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# https://github.com/ansible/ansible/issues/65816 +# https://github.com/PyCQA/pylint/issues/214 + +# (c) 2018, Adam Miller (admiller@redhat.com) +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: data_input_monitor +short_description: Manage Splunk Data Inputs of type Monitor +description: + - This module allows for addition or deletion of File and Directory Monitor Data Inputs in Splunk. +version_added: "1.0.0" +deprecated: + alternative: splunk_data_inputs_monitor + why: Newer and updated modules released with more functionality. + removed_at_date: '2024-09-01' +options: + name: + description: + - The file or directory path to monitor on the system. + required: True + type: str + state: + description: + - Add or remove a data source. + required: True + choices: + - "present" + - "absent" + type: str + blacklist: + description: + - Specify a regular expression for a file path. The file path that matches this regular expression is not indexed. + required: False + type: str + check_index: + description: + - If set to C(True), the index value is checked to ensure that it is the name of a valid index. + required: False + type: bool + default: False + check_path: + description: + - If set to C(True), the name value is checked to ensure that it exists. + required: False + type: bool + crc_salt: + description: + - A string that modifies the file tracking identity for files in this input. 
+ The magic value <SOURCE> invokes special behavior (see admin documentation). + required: False + type: str + disabled: + description: + - Indicates if input monitoring is disabled. + required: False + default: False + type: bool + followTail: + description: + - If set to C(True), files that are seen for the first time is read from the end. + required: False + type: bool + default: False + host: + description: + - The value to populate in the host field for events from this data input. + required: False + type: str + host_regex: + description: + - Specify a regular expression for a file path. If the path for a file + matches this regular expression, the captured value is used to populate + the host field for events from this data input. The regular expression + must have one capture group. + required: False + type: str + host_segment: + description: + - Use the specified slash-separate segment of the filepath as the host field value. + required: False + type: int + ignore_older_than: + description: + - Specify a time value. If the modification time of a file being monitored + falls outside of this rolling time window, the file is no longer being monitored. + required: False + type: str + index: + description: + - Which index events from this input should be stored in. Defaults to default. + required: False + type: str + recursive: + description: + - Setting this to False prevents monitoring of any subdirectories encountered within this data input. + required: False + type: bool + default: False + rename_source: + description: + - The value to populate in the source field for events from this data input. + The same source should not be used for multiple data inputs. + required: False + type: str + sourcetype: + description: + - The value to populate in the sourcetype field for incoming events. 
+ required: False + type: str + time_before_close: + description: + - When Splunk software reaches the end of a file that is being read, the + file is kept open for a minimum of the number of seconds specified in + this value. After this period has elapsed, the file is checked again for + more data. + required: False + type: int + whitelist: + description: + - Specify a regular expression for a file path. Only file paths that match this regular expression are indexed. + required: False + type: str +author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> +""" + +EXAMPLES = """ +- name: Example adding data input monitor with splunk.es.data_input_monitor + splunk.es.data_input_monitor: + name: "/var/log/example.log" + state: "present" + recursive: True +""" + +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_text +from ansible.module_utils.six.moves.urllib.parse import quote_plus +from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import ( + utils, +) +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, +) + + +def main(): + + argspec = dict( + name=dict(required=True, type="str"), + state=dict(choices=["present", "absent"], required=True), + blacklist=dict(required=False, type="str", default=None), + check_index=dict(required=False, type="bool", default=False), + check_path=dict(required=False, type="bool", default=None), + crc_salt=dict(required=False, type="str", default=None), + disabled=dict(required=False, type="bool", default=False), + followTail=dict(required=False, type="bool", default=False), + host=dict(required=False, type="str", default=None), + host_segment=dict(required=False, type="int", default=None), + host_regex=dict(required=False, type="str", default=None), + ignore_older_than=dict(required=False, type="str", default=None), + index=dict(required=False, type="str", default=None), + 
recursive=dict(required=False, type="bool", default=False), + rename_source=dict(required=False, type="str", default=None), + sourcetype=dict(required=False, type="str", default=None), + time_before_close=dict(required=False, type="int", default=None), + whitelist=dict(required=False, type="str", default=None), + ) + + module = AnsibleModule(argument_spec=argspec, supports_check_mode=True) + + # map of keys for the splunk REST API that aren't pythonic so we have to + # handle the substitutes + keymap = { + "check_index": "check-index", + "check_path": "check-path", + "crc_salt": "crc-salt", + "ignore_older_than": "ignore-older-than", + "rename_source": "rename-source", + "time_before_close": "time-before-close", + } + + splunk_request = SplunkRequest( + module, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + keymap=keymap, + not_rest_data_keys=["state"], + ) + # This is where the splunk_* args are processed + request_data = splunk_request.get_data() + + query_dict = splunk_request.get_by_path( + "servicesNS/nobody/search/data/inputs/monitor/{0}".format( + quote_plus(module.params["name"]) + ) + ) + query_dict = utils.remove_empties(query_dict) + + if module.params["state"] == "present": + if query_dict: + needs_change = False + for arg in request_data: + if arg in query_dict["entry"][0]["content"]: + if to_text( + query_dict["entry"][0]["content"][arg] + ) != to_text(request_data[arg]): + needs_change = True + if not needs_change: + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + if module.check_mode and needs_change: + module.exit_json( + changed=True, + msg="A change would have been made if not in check mode.", + splunk_data=query_dict, + ) + if needs_change: + splunk_data = splunk_request.create_update( + "servicesNS/nobody/search/data/inputs/monitor/{0}".format( + quote_plus(module.params["name"]) + ) + ) + module.exit_json( + changed=True, msg="{0} updated.", splunk_data=splunk_data + ) + else: + # 
Create it + _data = splunk_request.get_data() + _data["name"] = module.params["name"] + splunk_data = splunk_request.create_update( + "servicesNS/nobody/search/data/inputs/monitor", + data=_data, + ) + module.exit_json( + changed=True, msg="{0} created.", splunk_data=splunk_data + ) + + if module.params["state"] == "absent": + if query_dict: + splunk_data = splunk_request.delete_by_path( + "servicesNS/nobody/search/data/inputs/monitor/{0}".format( + quote_plus(module.params["name"]) + ) + ) + module.exit_json( + changed=True, + msg="Deleted {0}.".format(module.params["name"]), + splunk_data=splunk_data, + ) + + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py new file mode 100644 index 000000000..5771eb9cc --- /dev/null +++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py @@ -0,0 +1,276 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# https://github.com/ansible/ansible/issues/65816 +# https://github.com/PyCQA/pylint/issues/214 + +# (c) 2018, Adam Miller (admiller@redhat.com) +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: data_input_network +short_description: Manage Splunk Data Inputs of type TCP or UDP +description: + - This module allows for addition or deletion of TCP and UDP Data Inputs in Splunk. +version_added: "1.0.0" +deprecated: + alternative: splunk_data_inputs_network + why: Newer and updated modules released with more functionality. 
+ removed_at_date: '2024-09-01' +options: + protocol: + description: + - Choose between tcp or udp + required: True + choices: + - 'tcp' + - 'udp' + type: str + connection_host: + description: + - Set the host for the remote server that is sending data. + - C(ip) sets the host to the IP address of the remote server sending data. + - C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data. + - C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname. + default: "ip" + required: False + type: str + choices: + - "ip" + - "dns" + - "none" + state: + description: + - Enable, disable, create, or destroy + choices: + - "present" + - "absent" + - "enabled" + - "disable" + required: False + default: "present" + type: str + datatype: + description: > + Forwarders can transmit three types of data: raw, unparsed, or parsed. + C(cooked) data refers to parsed and unparsed formats. + choices: + - "cooked" + - "raw" + default: "raw" + required: False + type: str + host: + description: + - Host from which the indexer gets data. + required: False + type: str + index: + description: + - default Index to store generated events. + type: str + name: + description: + - The input port which receives raw data. + required: True + type: str + queue: + description: + - Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue. + - Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more + information about props.conf and rules for timestamping and linebreaking, refer to props.conf and + the online documentation at "Monitor files and directories with inputs.conf" + - Set queue to indexQueue to send your data directly into the index. + choices: + - "parsingQueue" + - "indexQueue" + type: str + required: False + default: "parsingQueue" + rawTcpDoneTimeout: + description: + - Specifies in seconds the timeout value for adding a Done-key. 
+ - If a connection over the port specified by name remains idle after receiving data for specified + number of seconds, it adds a Done-key. This implies the last event is completely received. + default: 10 + type: int + required: False + restrictToHost: + description: + - Allows for restricting this input to only accept data from the host specified here. + required: False + type: str + ssl: + description: + - Enable or disble ssl for the data stream + required: False + type: bool + source: + description: + - Sets the source key/field for events from this input. Defaults to the input file path. + - > + Sets the source key initial value. The key is used during parsing/indexing, in particular to set + the source field during indexing. It is also the source field used at search time. As a convenience, + the chosen string is prepended with 'source::'. + - > + Note: Overriding the source key is generally not recommended. Typically, the input layer provides a + more accurate string to aid in problem analysis and investigation, accurately recording the file from + which the data was retrieved. Consider use of source types, tagging, and search wildcards before + overriding this value. + type: str + sourcetype: + description: + - Set the source type for events from this input. + - '"sourcetype=" is automatically prepended to <string>.' + - Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False). 
+ type: str +author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security> +""" + +EXAMPLES = """ +- name: Example adding data input network with splunk.es.data_input_network + splunk.es.data_input_network: + name: "8099" + protocol: "tcp" + state: "present" +""" + + +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_text +from ansible.module_utils.six.moves.urllib.parse import quote_plus +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, +) + + +def main(): + + argspec = dict( + state=dict( + required=False, + choices=["present", "absent", "enabled", "disable"], + default="present", + type="str", + ), + connection_host=dict( + required=False, + choices=["ip", "dns", "none"], + default="ip", + type="str", + ), + host=dict(required=False, type="str", default=None), + index=dict(required=False, type="str", default=None), + name=dict(required=True, type="str"), + protocol=dict(required=True, type="str", choices=["tcp", "udp"]), + queue=dict( + required=False, + type="str", + choices=["parsingQueue", "indexQueue"], + default="parsingQueue", + ), + rawTcpDoneTimeout=dict(required=False, type="int", default=10), + restrictToHost=dict(required=False, type="str", default=None), + ssl=dict(required=False, type="bool", default=None), + source=dict(required=False, type="str", default=None), + sourcetype=dict(required=False, type="str", default=None), + datatype=dict( + required=False, choices=["cooked", "raw"], default="raw" + ), + ) + + module = AnsibleModule(argument_spec=argspec, supports_check_mode=True) + + splunk_request = SplunkRequest( + module, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + not_rest_data_keys=["state", "datatype", "protocol"], + ) + # This is where the splunk_* args are processed + request_data = splunk_request.get_data() + + query_dict = splunk_request.get_by_path( + 
"servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format( + quote_plus(module.params["protocol"]), + quote_plus(module.params["datatype"]), + quote_plus(module.params["name"]), + ) + ) + + if module.params["state"] in ["present", "enabled", "disabled"]: + _data = splunk_request.get_data() + if module.params["state"] in ["present", "enabled"]: + _data["disabled"] = False + else: + _data["disabled"] = True + if query_dict: + needs_change = False + for arg in request_data: + if arg in query_dict["entry"][0]["content"]: + if to_text( + query_dict["entry"][0]["content"][arg] + ) != to_text(request_data[arg]): + needs_change = True + if not needs_change: + module.exit_json( + changed=False, msg="Nothing to do.", splunk_data=query_dict + ) + if module.check_mode and needs_change: + module.exit_json( + changed=True, + msg="A change would have been made if not in check mode.", + splunk_data=query_dict, + ) + if needs_change: + splunk_data = splunk_request.create_update( + "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format( + quote_plus(module.params["protocol"]), + quote_plus(module.params["datatype"]), + quote_plus(module.params["name"]), + ), + data=_data, + ) + if module.params["state"] in ["present", "enabled"]: + module.exit_json( + changed=True, msg="{0} updated.", splunk_data=splunk_data + ) + else: + module.exit_json( + changed=True, msg="{0} disabled.", splunk_data=splunk_data + ) + else: + # Create it + splunk_data = splunk_request.create_update( + "servicesNS/nobody/search/data/inputs/{0}/{1}".format( + quote_plus(module.params["protocol"]), + quote_plus(module.params["datatype"]), + ), + data=_data, + ) + module.exit_json( + changed=True, msg="{0} created.", splunk_data=splunk_data + ) + elif module.params["state"] == "absent": + if query_dict: + splunk_data = splunk_request.delete_by_path( + "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format( + quote_plus(module.params["protocol"]), + quote_plus(module.params["datatype"]), + 
quote_plus(module.params["name"]), + ) + ) + module.exit_json( + changed=True, + msg="Deleted {0}.".format(module.params["name"]), + splunk_data=splunk_data, + ) + + module.exit_json(changed=False, msg="Nothing to do.", splunk_data={}) + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py new file mode 100644 index 000000000..0f4922f77 --- /dev/null +++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py @@ -0,0 +1,300 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright 2022 Red Hat +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: splunk_data_inputs_monitor +short_description: Splunk Data Inputs of type Monitor resource module +description: + - Module to add/modify or delete, File and Directory Monitor Data Inputs in Splunk. + - Tested against Splunk Enterprise Server 8.2.3 +version_added: "2.1.0" +options: + config: + description: + - Configure file and directory monitoring on the system + type: list + elements: dict + suboptions: + name: + description: + - The file or directory path to monitor on the system. + required: True + type: str + blacklist: + description: + - Specify a regular expression for a file path. The file path that matches this regular expression is not indexed. + type: str + check_index: + description: + - If set to C(True), the index value is checked to ensure that it is the name of a valid index. + - This parameter is not returned back by Splunk while obtaining object information. + It is therefore left out while performing idempotency checks + type: bool + check_path: + description: + - If set to C(True), the name value is checked to ensure that it exists. 
+ - This parameter is not returned back by Splunk while obtaining object information. + It is therefore left out while performing idempotency checks + type: bool + crc_salt: + description: + - A string that modifies the file tracking identity for files in this input. + The magic value <SOURCE> invokes special behavior (see admin documentation). + type: str + disabled: + description: + - Indicates if input monitoring is disabled. + type: bool + default: False + follow_tail: + description: + - If set to C(True), files that are seen for the first time is read from the end. + type: bool + host: + description: + - The value to populate in the host field for events from this data input. + type: str + default: "$decideOnStartup" + host_regex: + description: + - Specify a regular expression for a file path. If the path for a file + matches this regular expression, the captured value is used to populate + the host field for events from this data input. The regular expression + must have one capture group. + type: str + host_segment: + description: + - Use the specified slash-separate segment of the filepath as the host field value. + type: int + ignore_older_than: + description: + - Specify a time value. If the modification time of a file being monitored + falls outside of this rolling time window, the file is no longer being monitored. + - This parameter is not returned back by Splunk while obtaining object information. + It is therefore left out while performing idempotency checks + type: str + index: + description: + - Which index events from this input should be stored in. Defaults to default. + type: str + default: "default" + recursive: + description: + - Setting this to False prevents monitoring of any subdirectories encountered within this data input. + type: bool + rename_source: + description: + - The value to populate in the source field for events from this data input. + The same source should not be used for multiple data inputs. 
+ - This parameter is not returned back by Splunk while obtaining object information. + It is therefore left out while performing idempotency checks + type: str + sourcetype: + description: + - The value to populate in the sourcetype field for incoming events. + type: str + time_before_close: + description: + - When Splunk software reaches the end of a file that is being read, the + file is kept open for a minimum of the number of seconds specified in + this value. After this period has elapsed, the file is checked again for + more data. + - This parameter is not returned back by Splunk while obtaining object information. + It is therefore left out while performing idempotency checks + type: int + whitelist: + description: + - Specify a regular expression for a file path. Only file paths that match this regular expression are indexed. + type: str + + running_config: + description: + - The module, by default, will connect to the remote device and retrieve the current + running-config to use as a base for comparing against the contents of source. + There are times when it is not desirable to have the task get the current running-config + for every task in a playbook. The I(running_config) argument allows the implementer + to pass in the configuration to use as the base config for comparison. This + value of this option should be the output received from device by executing + command. 
+ type: str + state: + description: + - The state the configuration should be left in + type: str + choices: + - merged + - replaced + - deleted + - gathered + default: merged + +author: Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security> +""" + +EXAMPLES = """ + +# Using gathered +# -------------- + +- name: Gather config for specified Data inputs monitors + splunk.es.splunk_data_inputs_monitor: + config: + - name: "/var/log" + - name: "/var" + state: gathered + +# RUN output: +# ----------- + +# "gathered": [ +# { +# "blacklist": "//var/log/[a-z0-9]/gm", +# "crc_salt": "<SOURCE>", +# "disabled": false, +# "host": "$decideOnStartup", +# "host_regex": "/(test_host)/gm", +# "host_segment": 3, +# "index": "default", +# "name": "/var/log", +# "recursive": true, +# "sourcetype": "test_source", +# "whitelist": "//var/log/[0-9]/gm" +# } +# ] +# + +# Using merged +# ------------ + +- name: Update Data inputs monitors config + splunk.es.splunk_data_inputs_monitor: + config: + - name: "/var/log" + blacklist: "//var/log/[a-z]/gm" + check_index: True + check_path: True + crc_salt: <SOURCE> + rename_source: "test" + whitelist: "//var/log/[0-9]/gm" + state: merged + +# RUN output: +# ----------- + +# "after": [ +# { +# "blacklist": "//var/log/[a-z]/gm", +# "crc_salt": "<SOURCE>", +# "disabled": false, +# "host": "$decideOnStartup", +# "host_regex": "/(test_host)/gm", +# "host_segment": 3, +# "index": "default", +# "name": "/var/log", +# "recursive": true, +# "sourcetype": "test_source", +# "whitelist": "//var/log/[0-9]/gm" +# } +# ], +# "before": [ +# { +# "blacklist": "//var/log/[a-z0-9]/gm", +# "crc_salt": "<SOURCE>", +# "disabled": false, +# "host": "$decideOnStartup", +# "host_regex": "/(test_host)/gm", +# "host_segment": 3, +# "index": "default", +# "name": "/var/log", +# "recursive": true, +# "sourcetype": "test_source", +# "whitelist": "//var/log/[0-9]/gm" +# } +# ], + +# Using replaced +# -------------- + +- name: To Replace Data 
inputs monitors config + splunk.es.splunk_data_inputs_monitor: + config: + - name: "/var/log" + blacklist: "//var/log/[a-z0-9]/gm" + crc_salt: <SOURCE> + index: default + state: replaced + +# RUN output: +# ----------- + +# "after": [ +# { +# "blacklist": "//var/log/[a-z0-9]/gm", +# "crc_salt": "<SOURCE>", +# "disabled": false, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "/var/log" +# } +# ], +# "before": [ +# { +# "blacklist": "//var/log/[a-z0-9]/gm", +# "crc_salt": "<SOURCE>", +# "disabled": false, +# "host": "$decideOnStartup", +# "host_regex": "/(test_host)/gm", +# "host_segment": 3, +# "index": "default", +# "name": "/var/log", +# "recursive": true, +# "sourcetype": "test_source", +# "whitelist": "//var/log/[0-9]/gm" +# } +# ], + +# Using deleted +# ----------- +- name: To Delete Data input monitor config + splunk.es.splunk_data_inputs_monitor: + config: + - name: "/var/log" + state: deleted + +# RUN output: +# ----------- +# +# "after": [], +# "before": [ +# { +# "blacklist": "//var/log/[a-z0-9]/gm", +# "crc_salt": "<SOURCE>", +# "disabled": false, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "/var/log" +# } +# ], + +""" + +RETURN = """ +before: + description: The configuration as structured data prior to module invocation. + returned: always + type: list + sample: The configuration returned will always be in the same format of the parameters above. +after: + description: The configuration as structured data after module completion. + returned: when changed + type: list + sample: The configuration returned will always be in the same format of the parameters above. 
+""" diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py new file mode 100644 index 000000000..688e806f1 --- /dev/null +++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py @@ -0,0 +1,603 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright 2022 Red Hat +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: splunk_data_inputs_network +short_description: Manage Splunk Data Inputs of type TCP or UDP resource module +description: + - Module that allows to add/update or delete of TCP and UDP Data Inputs in Splunk. +version_added: "2.1.0" +options: + config: + description: + - Manage and preview protocol input data. + type: list + elements: dict + suboptions: + name: + description: + - The input port which receives raw data. + required: True + type: str + protocol: + description: + - Choose whether to manage TCP or UDP inputs + required: True + choices: + - 'tcp' + - 'udp' + type: str + connection_host: + description: + - Set the host for the remote server that is sending data. + - C(ip) sets the host to the IP address of the remote server sending data. + - C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data. + - C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname. + type: str + choices: + - "ip" + - "dns" + - "none" + datatype: + description: + - C(cooked) lets one access cooked TCP input information and create new containers for managing cooked data. + - C(raw) lets one manage raw tcp inputs from forwarders. + - C(splunktcptoken) lets one manage receiver access using tokens. + - C(ssl) Provides access to the SSL configuration of a Splunk server. 
+ This option does not support states I(deleted) and I(replaced). + choices: + - "cooked" + - "raw" + - "splunktcptoken" + - "ssl" + required: False + type: str + disabled: + description: + - Indicates whether the input is disabled. + type: bool + host: + description: + - Host from which the indexer gets data. + type: str + index: + description: + - default Index to store generated events. + type: str + no_appending_timestamp: + description: + - If set to true, prevents Splunk software from prepending a timestamp and hostname to incoming events. + - Only for UDP data input configuration. + type: bool + no_priority_stripping: + description: + - If set to true, Splunk software does not remove the priority field from incoming syslog events. + - Only for UDP data input configuration. + type: bool + queue: + description: + - Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue. + - Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more + information about props.conf and rules for timestamping and linebreaking, refer to props.conf and + the online documentation at "Monitor files and directories with inputs.conf" + - Set queue to indexQueue to send your data directly into the index. + - Only applicable for "/tcp/raw" and "/udp" APIs + choices: + - "parsingQueue" + - "indexQueue" + type: str + raw_tcp_done_timeout: + description: + - Specifies in seconds the timeout value for adding a Done-key. + - If a connection over the port specified by name remains idle after receiving data for specified + number of seconds, it adds a Done-key. This implies the last event is completely received. + - Only for TCP raw input configuration. + type: int + restrict_to_host: + description: + - Allows for restricting this input to only accept data from the host specified here. 
+ type: str + ssl: + description: + - Enable or disable ssl for the data stream + type: bool + source: + description: + - Sets the source key/field for events from this input. Defaults to the input file path. + - Sets the source key initial value. The key is used during parsing/indexing, in particular to set + the source field during indexing. It is also the source field used at search time. As a convenience, + the chosen string is prepended with 'source::'. + - Note that overriding the source key is generally not recommended. Typically, the input layer provides a + more accurate string to aid in problem analysis and investigation, accurately recording the file from + which the data was retrieved. Consider use of source types, tagging, and search wildcards before + overriding this value. + type: str + sourcetype: + description: + - Set the source type for events from this input. + - '"sourcetype=" is automatically prepended to <string>.' + - Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False). + type: str + token: + description: + - Token value to use for SplunkTcpToken. If unspecified, a token is generated automatically. + type: str + password: + description: + - Server certificate password, if any. + - Only for TCP SSL configuration. + type: str + require_client_cert: + description: + - Determines whether a client must authenticate. + - Only for TCP SSL configuration. + type: str + root_ca: + description: + - Certificate authority list (root file). + - Only for TCP SSL configuration. + type: str + server_cert: + description: + - Full path to the server certificate. + - Only for TCP SSL configuration. + type: str + cipher_suite: + description: + - Specifies list of acceptable ciphers to use in ssl. + - Only obtained for TCP SSL configuration present on device. 
+ type: str + + running_config: + description: + - The module, by default, will connect to the remote device and retrieve the current + running-config to use as a base for comparing against the contents of source. + There are times when it is not desirable to have the task get the current running-config + for every task in a playbook. The I(running_config) argument allows the implementer + to pass in the configuration to use as the base config for comparison. This + value of this option should be the output received from device by executing + command. + type: str + state: + description: + - The state the configuration should be left in + type: str + choices: + - merged + - replaced + - deleted + - gathered + default: merged + +author: Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security> +""" + +EXAMPLES = """ + +# Using gathered +# -------------- + +- name: Gathering information about TCP Cooked Inputs + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: cooked + state: gathered + +# RUN output: +# ----------- + +# "gathered": [ +# { +# "connection_host": "ip", +# "disabled": true, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "8101" +# }, +# { +# "disabled": false, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "9997" +# }, +# { +# "connection_host": "ip", +# "disabled": true, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "default:8101", +# "restrict_to_host": "default" +# } +# ] + + +- name: Gathering information about TCP Cooked Inputs by Name + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: cooked + name: 9997 + state: gathered + +# RUN output: +# ----------- + +# "gathered": [ +# { +# "datatype": "cooked", +# "disabled": false, +# "host": "$decideOnStartup", +# "name": "9997", +# "protocol": "tcp" +# } +# ] + + +- name: Gathering information about TCP Raw Inputs + splunk.es.splunk_data_inputs_network: + config: + - 
protocol: tcp + datatype: raw + state: gathered + +# RUN output: +# ----------- + +# "gathered": [ +# { +# "connection_host": "ip", +# "disabled": false, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "8099", +# "queue": "parsingQueue", +# "raw_tcp_done_timeout": 10 +# }, +# { +# "connection_host": "ip", +# "disabled": true, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "default:8100", +# "queue": "parsingQueue", +# "raw_tcp_done_timeout": 10, +# "restrict_to_host": "default", +# "source": "test_source", +# "sourcetype": "test_source_type" +# } +# ] + +- name: Gathering information about TCP Raw inputs by Name + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: raw + name: 8099 + state: gathered + +# RUN output: +# ----------- + +# "gathered": [ +# { +# "connection_host": "ip", +# "datatype": "raw", +# "disabled": false, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "8099", +# "protocol": "tcp", +# "queue": "parsingQueue", +# "raw_tcp_done_timeout": 10 +# } +# ] + +- name: Gathering information about TCP SSL configuration + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: ssl + state: gathered + +# RUN output: +# ----------- + +# "gathered": [ +# { +# "cipher_suite": <cipher-suites>, +# "disabled": true, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "test_host" +# } +# ] + +- name: Gathering information about TCP SplunkTcpTokens + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: splunktcptoken + state: gathered + +# RUN output: +# ----------- + +# "gathered": [ +# { +# "disabled": false, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "splunktcptoken://test_token1", +# "token": <token1> +# }, +# { +# "disabled": false, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "splunktcptoken://test_token2", +# "token": <token2> +# } +# ] + +# Using merged +# ------------ + +- name: 
To add the TCP raw config + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: raw + name: 8100 + connection_host: ip + disabled: True + raw_tcp_done_timeout: 9 + restrict_to_host: default + queue: parsingQueue + source: test_source + sourcetype: test_source_type + state: merged + +# RUN output: +# ----------- + +# "after": [ +# { +# "connection_host": "ip", +# "datatype": "raw", +# "disabled": true, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "default:8100", +# "protocol": "tcp", +# "queue": "parsingQueue", +# "raw_tcp_done_timeout": 9, +# "restrict_to_host": "default", +# "source": "test_source", +# "sourcetype": "test_source_type" +# } +# ], +# "before": [ +# { +# "connection_host": "ip", +# "datatype": "raw", +# "disabled": true, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "default:8100", +# "protocol": "tcp", +# "queue": "parsingQueue", +# "raw_tcp_done_timeout": 10, +# "restrict_to_host": "default", +# "source": "test_source", +# "sourcetype": "test_source_type" +# } +# ] + +- name: To add the TCP cooked config + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: cooked + name: 8101 + connection_host: ip + disabled: False + restrict_to_host: default + state: merged + +# RUN output: +# ----------- + +# "after": [ +# { +# "connection_host": "ip", +# "datatype": "cooked", +# "disabled": false, +# "host": "$decideOnStartup", +# "name": "default:8101", +# "protocol": "tcp", +# "restrict_to_host": "default" +# } +# ], +# "before": [ +# { +# "connection_host": "ip", +# "datatype": "cooked", +# "disabled": true, +# "host": "$decideOnStartup", +# "name": "default:8101", +# "protocol": "tcp", +# "restrict_to_host": "default" +# } +# ], + +- name: To add the Splunk TCP token + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: splunktcptoken + name: test_token + state: merged + +# RUN output: +# ----------- + +# "after": [ +# { +# "datatype": 
"splunktcptoken", +# "name": "splunktcptoken://test_token", +# "protocol": "tcp", +# "token": <token> +# } +# ], +# "before": [], + +- name: To add the Splunk SSL + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: ssl + name: test_host + root_ca: {root CA directory} + server_cert: {server cretificate directory} + state: merged + +# RUN output: +# ----------- + +# "after": [ +# { +# "cipher_suite": <cipher suite>, +# "datatype": "ssl", +# "disabled": true, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "test_host", +# "protocol": "tcp" +# } +# ], +# "before": [] + + +# Using deleted +# ------------- + +- name: To Delete TCP Raw + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: raw + name: default:8100 + state: deleted + +# RUN output: +# ----------- + +# "after": [], +# "before": [ +# { +# "connection_host": "ip", +# "datatype": "raw", +# "disabled": true, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "default:8100", +# "protocol": "tcp", +# "queue": "parsingQueue", +# "raw_tcp_done_timeout": 9, +# "restrict_to_host": "default", +# "source": "test_source", +# "sourcetype": "test_source_type" +# } +# ] + +# Using replaced +# -------------- + +- name: Replace existing data inputs networks configuration + register: result + splunk.es.splunk_data_inputs_network: + state: replaced + config: + - protocol: tcp + datatype: raw + name: 8100 + connection_host: ip + disabled: True + host: "$decideOnStartup" + index: default + queue: parsingQueue + raw_tcp_done_timeout: 10 + restrict_to_host: default + source: test_source + sourcetype: test_source_type + +# RUN output: +# ----------- + +# "after": [ +# { +# "connection_host": "ip", +# "datatype": "raw", +# "disabled": true, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "default:8100", +# "protocol": "tcp", +# "queue": "parsingQueue", +# "raw_tcp_done_timeout": 9, +# "restrict_to_host": "default", +# "source": 
"test_source", +# "sourcetype": "test_source_type" +# } +# ], +# "before": [ +# { +# "connection_host": "ip", +# "datatype": "raw", +# "disabled": true, +# "host": "$decideOnStartup", +# "index": "default", +# "name": "default:8100", +# "protocol": "tcp", +# "queue": "parsingQueue", +# "raw_tcp_done_timeout": 10, +# "restrict_to_host": "default", +# "source": "test_source", +# "sourcetype": "test_source_type" +# } +# ], + +""" + +RETURN = """ +before: + description: The configuration prior to the module execution. + returned: when state is I(merged), I(replaced), I(deleted) + type: list + sample: > + This output will always be in the same format as the + module argspec. +after: + description: The resulting configuration after module execution. + returned: when changed + type: list + sample: > + This output will always be in the same format as the + module argspec. +gathered: + description: Facts about the network resource gathered from the remote device as structured data. + returned: when state is I(gathered) + type: dict + sample: > + This output will always be in the same format as the + module argspec. +""" diff --git a/ansible_collections/splunk/es/pyproject.toml b/ansible_collections/splunk/es/pyproject.toml new file mode 100644 index 000000000..96ec36d26 --- /dev/null +++ b/ansible_collections/splunk/es/pyproject.toml @@ -0,0 +1,11 @@ +[tool.black] +line-length = 79 + +[tool.pytest.ini_options] +addopts = ["-vvv", "-n", "2", "--log-level", "WARNING", "--color", "yes"] +testpaths = [ + "tests", +] +filterwarnings = [ + 'ignore:AnsibleCollectionFinder has already been configured', +]
\ No newline at end of file diff --git a/ansible_collections/splunk/es/requirements.txt b/ansible_collections/splunk/es/requirements.txt new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/ansible_collections/splunk/es/requirements.txt diff --git a/ansible_collections/splunk/es/test-requirements.txt b/ansible_collections/splunk/es/test-requirements.txt new file mode 100644 index 000000000..8002336b1 --- /dev/null +++ b/ansible_collections/splunk/es/test-requirements.txt @@ -0,0 +1,8 @@ +black==22.3.0 ; python_version > '3.5' +flake8 +mock ; python_version < '3.5' +pexpect +pytest-xdist +yamllint +coverage==4.5.4 +git+https://github.com/ansible-community/pytest-ansible-units.git diff --git a/ansible_collections/splunk/es/tests/.keep b/ansible_collections/splunk/es/tests/.keep new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/ansible_collections/splunk/es/tests/.keep diff --git a/ansible_collections/splunk/es/tests/integration/network-integration.cfg b/ansible_collections/splunk/es/tests/integration/network-integration.cfg new file mode 100644 index 000000000..d12c1efe2 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/network-integration.cfg @@ -0,0 +1,4 @@ +[persistent_connection] +command_timeout = 100 +connect_timeout = 100 +connect_retry_timeout = 100 diff --git a/ansible_collections/splunk/es/tests/integration/target-prefixes.network b/ansible_collections/splunk/es/tests/integration/target-prefixes.network new file mode 100644 index 000000000..2a852434e --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/target-prefixes.network @@ -0,0 +1 @@ +splunk diff --git a/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/aliases b/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/aliases new file mode 100644 index 000000000..f4c7f6a2b --- /dev/null +++ 
b/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/aliases @@ -0,0 +1 @@ +network/splunk diff --git a/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/tasks/main.yml b/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/tasks/main.yml new file mode 100644 index 000000000..d111fea78 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/adaptive_response_notable_event/tasks/main.yml @@ -0,0 +1,55 @@ +--- +- name: remove previous correlation_search + correlation_search: + name: "Test Fake Coorelation Search From Playbook" + description: "Test Fake Coorelation Search From Playbook, description edition." + search: 'source="/var/log/snort.log"' + state: "absent" + +- name: create correlation_search + correlation_search: + name: "Test Fake Coorelation Search From Playbook" + description: "Test Fake Coorelation Search From Playbook, description edition." + search: 'source="/var/log/snort.log"' + state: "present" + +- name: Test splunk.es.adaptive_response_notable_event + adaptive_response_notable_event: + name: "Fake notable event from playbook" + correlation_search_name: "Test Fake Coorelation Search From Playbook" + description: "Test Fake notable event from playbook, description edition." 
+ state: "present" + next_steps: + - ping + - nslookup + recommended_actions: + - script + - ansiblesecurityautomation + default_status: unassigned + register: adaptive_response_notable_event_out + +- name: Assert Create splunk.es.adaptive_response_notable_event CHANGED + assert: + that: + - adaptive_response_notable_event_out['changed'] == True + - adaptive_response_notable_event_out['failed'] == False + +- name: Validate splunk.es.adaptive_response_notable_event idempotent + adaptive_response_notable_event: + name: "Fake notable event from playbook" + correlation_search_name: "Test Fake Coorelation Search From Playbook" + description: "Test Fake notable event from playbook, description edition." + state: "present" + next_steps: + - ping + - nslookup + recommended_actions: + - script + - ansiblesecurityautomation + register: adaptive_response_notable_event_out2 + +- name: Assert Create splunk.es.adaptive_response_notable_event IDEMPOTENT + assert: + that: + - adaptive_response_notable_event_out2['changed'] == False + - adaptive_response_notable_event_out2['failed'] == False diff --git a/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/aliases b/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/aliases new file mode 100644 index 000000000..f4c7f6a2b --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/aliases @@ -0,0 +1 @@ +network/splunk diff --git a/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/tasks/main.yml b/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/tasks/main.yml new file mode 100644 index 000000000..a2ae59ef4 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/correlation_search_info/tasks/main.yml @@ -0,0 +1,74 @@ +--- +- name: Cleanup old correlation_search + correlation_search: + name: "Test Fake Coorelation Search From Playbook" + description: "Test Fake 
Coorelation Search From Playbook, description edition." + search: 'source="/var/log/snort.log"' + state: "absent" + +- name: Test correlation_search - CREATE + correlation_search: + name: "Test Fake Coorelation Search From Playbook" + description: "Test Fake Coorelation Search From Playbook, description edition." + search: 'source="/var/log/snort.log"' + state: "present" + register: correlation_search_create_output + +- name: Assert Create splunk.es.correlation_search CHANGED + assert: + that: + - correlation_search_create_output['changed'] == True + - correlation_search_create_output['failed'] == False + +- name: Test correlation_search - CREATE IDEMPOTENT + correlation_search: + name: "Test Fake Coorelation Search From Playbook" + description: "Test Fake Coorelation Search From Playbook, description edition." + search: 'source="/var/log/snort.log"' + state: "present" + register: correlation_search_create_output2 + +- name: Assert Create splunk.es.correlation_search IDEMPOTENT + assert: + that: + - correlation_search_create_output2['changed'] == False + - correlation_search_create_output2['failed'] == False + +- name: Test correlation_search_info + correlation_search_info: + name: "Test Fake Coorelation Search From Playbook" + register: correlation_search_info_output + +- name: Assert Create splunk.es.correlation_search CHANGED + assert: + that: + - correlation_search_info_output['changed'] == False + - correlation_search_info_output['failed'] == False + +- name: Test correlation_search - DELETE + correlation_search: + name: "Test Fake Coorelation Search From Playbook" + description: "Test Fake Coorelation Search From Playbook, description edition." 
+ search: 'source="/var/log/snort.log"' + state: "absent" + register: correlation_search_delete_output + +- name: Assert Create splunk.es.correlation_search CHANGED + assert: + that: + - correlation_search_delete_output['changed'] == True + - correlation_search_delete_output['failed'] == False + +- name: Test correlation_search - DELETE IDEMPOTENT + correlation_search: + name: "Test Fake Coorelation Search From Playbook" + description: "Test Fake Coorelation Search From Playbook, description edition." + search: 'source="/var/log/snort.log"' + state: "absent" + register: correlation_search_delete_output2 + +- name: Assert Create splunk.es.correlation_search IDEMPOTENT + assert: + that: + - correlation_search_delete_output2['changed'] == False + - correlation_search_delete_output2['failed'] == False diff --git a/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/aliases b/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/aliases new file mode 100644 index 000000000..f4c7f6a2b --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/aliases @@ -0,0 +1 @@ +network/splunk diff --git a/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/tasks/main.yml b/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/tasks/main.yml new file mode 100644 index 000000000..87459760e --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/data_input_monitor/tasks/main.yml @@ -0,0 +1,58 @@ +--- +- name: Clean up previous data_input_monitor + data_input_monitor: + name: "/var/log/messages" + state: "absent" + recursive: True + +- name: Test data_input_monitor - CREATE + data_input_monitor: + name: "/var/log/messages" + state: "present" + recursive: True + register: data_input_monitor_output + +- name: Assert Create splunk.es.data_input_monitor CHANGED + assert: + that: + - data_input_monitor_output['changed'] == True + - 
data_input_monitor_output['failed'] == False + +- name: Test data_input_monitor - CREATE IDEMPOTENT + data_input_monitor: + name: "/var/log/messages" + state: "present" + recursive: True + register: data_input_monitor_output2 + +- name: Assert Create splunk.es.data_input_monitor CREATE IDEMPOTENT + assert: + that: + - data_input_monitor_output2['changed'] == False + - data_input_monitor_output2['failed'] == False + +- name: Test data_input_monitor - DELETE + data_input_monitor: + name: "/var/log/messages" + state: "absent" + recursive: True + register: data_input_monitor_absent_output + +- name: Assert Create splunk.es.data_input_monitor CHANGED + assert: + that: + - data_input_monitor_absent_output['changed'] == True + - data_input_monitor_absent_output['failed'] == False + +- name: Test data_input_monitor - DELETE IDEMPOTENT + data_input_monitor: + name: "/var/log/messages" + state: "absent" + recursive: True + register: data_input_monitor_absent_output2 + +- name: Assert Create splunk.es.data_input_monitor DELETE IDEMPOTENT + assert: + that: + - data_input_monitor_absent_output2['changed'] == False + - data_input_monitor_absent_output2['failed'] == False diff --git a/ansible_collections/splunk/es/tests/integration/targets/data_input_network/aliases b/ansible_collections/splunk/es/tests/integration/targets/data_input_network/aliases new file mode 100644 index 000000000..f4c7f6a2b --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/data_input_network/aliases @@ -0,0 +1 @@ +network/splunk diff --git a/ansible_collections/splunk/es/tests/integration/targets/data_input_network/tasks/main.yml b/ansible_collections/splunk/es/tests/integration/targets/data_input_network/tasks/main.yml new file mode 100644 index 000000000..5082458c0 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/data_input_network/tasks/main.yml @@ -0,0 +1,58 @@ +--- +- name: Cleanup previous data_input_network + data_input_network: + name: "8099" + 
protocol: "tcp" + state: "absent" + +- name: Test data_input_network - CREATE + data_input_network: + name: "8099" + protocol: "tcp" + state: "present" + register: data_input_network_output + +- name: Assert Create splunk.es.data_input_network CHANGED + assert: + that: + - data_input_network_output is changed + - data_input_network_output is not failed + +- name: Test data_input_network - CREATE IDEMPOTENT + data_input_network: + name: "8099" + protocol: "tcp" + state: "present" + register: data_input_network_output2 + +- name: Assert Create splunk.es.data_input_network CREATE IDEMPOTENT + assert: + that: + - data_input_network_output2 is not changed + - data_input_network_output2 is not failed + +- name: Test data_input_network - DELETE + data_input_network: + name: "8099" + protocol: "tcp" + state: "absent" + register: data_input_network_absent_output + +- name: Assert Create splunk.es.data_input_network CHANGED + assert: + that: + - data_input_network_absent_output is changed + - data_input_network_absent_output is not failed + +- name: Test data_input_network - DELETE IDEMPOTENT + data_input_network: + name: "8099" + protocol: "tcp" + state: "absent" + register: data_input_network_absent_output2 + +- name: Assert Create splunk.es.data_input_network DELETE IDEMPOTENT + assert: + that: + - data_input_network_absent_output2 is not changed + - data_input_network_absent_output2 is not failed diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_event/aliases b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_event/aliases new file mode 100644 index 000000000..f4c7f6a2b --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_event/aliases @@ -0,0 +1 @@ +network/splunk diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_event/tasks/main.yml 
b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_event/tasks/main.yml new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_event/tasks/main.yml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml new file mode 100644 index 000000000..10c0fabcb --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/defaults/main.yaml @@ -0,0 +1,2 @@ +--- +testcase: '*' diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/meta/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/meta/main.yaml new file mode 100644 index 000000000..23d65c7ef --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/meta/main.yaml @@ -0,0 +1,2 @@ +--- +dependencies: [] diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml new file mode 100644 index 000000000..dcc81f25f --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/cli.yaml @@ -0,0 +1,18 @@ +--- +- name: collect all test cases + find: + paths: '{{ role_path }}/tests' + patterns: '{{ testcase }}.yaml' + register: test_cases + +- name: set test_items + set_fact: test_items="{{ test_cases.files | map(attribute='path') | list }}" + +- name: Run test case (connection=ansible.netcommon.httpapi) + include: '{{ test_case_to_run }}' + vars: + 
ansible_connection: ansible.netcommon.httpapi + with_items: '{{ test_items }}' + loop_control: + loop_var: test_case_to_run + tags: connection_httpapi diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml new file mode 100644 index 000000000..62cc1ae1e --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/main.yaml @@ -0,0 +1,7 @@ +--- +- include: cli.yaml + tags: + - cli + +- include: redirection.yaml + when: ansible_version.full is version('2.10.0', '>=') diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml new file mode 100644 index 000000000..bafc23a45 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tasks/redirection.yaml @@ -0,0 +1,6 @@ +--- +- name: collect all test cases + find: + paths: '{{ role_path }}/tests/redirection' + patterns: '{{ testcase }}.yaml' + register: test_cases diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml new file mode 100644 index 000000000..02e9074da --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_populate_dim_config.yaml @@ -0,0 +1,49 @@ +--- +- name: create test correlation search + splunk.es.splunk_correlation_searches: + config: + - name: Ansible Test + description: test description + search: '| tstats summariesonly=true 
values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6' + state: merged + +- name: populate notable event adaptive response for test correlation search + splunk.es.splunk_adaptive_response_notable_events: + config: + - correlation_search_name: Ansible Test + description: test notable event + drilldown_earliest_offset: $info_min_time$ + drilldown_latest_offset: $info_max_time$ + drilldown_name: test_drill_name + drilldown_search: test_drill + extract_artifacts: + asset: + - src + - dest + - dvc + - orig_host + identity: + - src_user + - user + - src_user_id + - src_user_role + - user_id + - user_role + - vendor_account + investigation_profiles: + - test profile 1 + - test profile 2 + - test profile 3 + next_steps: + - makestreams + - nbtstat + - nslookup + name: ansible_test_notable + recommended_actions: + - email + - logevent + - makestreams + - nbtstat + security_domain: threat + severity: high + state: merged
\ No newline at end of file diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_remove_dim_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_remove_dim_config.yaml new file mode 100644 index 000000000..ab4a4a278 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/_remove_dim_config.yaml @@ -0,0 +1,6 @@ +--- +- name: create test correlation search + splunk.es.splunk_correlation_searches: + config: + - name: Ansible Test + state: deleted diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/deleted.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/deleted.yaml new file mode 100644 index 000000000..e2fa5c8cf --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/deleted.yaml @@ -0,0 +1,33 @@ +--- +- debug: + msg: Start Deleted integration state for adaptive_response_notable_events ansible_connection={{ ansible_connection }} + +- include_tasks: _remove_dim_config.yaml + +- include_tasks: _populate_dim_config.yaml + +- block: + - name: Delete adaptive response notable events config + splunk.es.splunk_adaptive_response_notable_events: &id001 + config: + - correlation_search_name: Ansible Test + state: deleted + register: result + + - assert: + that: + - result.changed == true + - merged['after'] == result['adaptive_response_notable_events']['before'] + - merged['before'] == result['adaptive_response_notable_events']['after'] + + - name: Delete attributes of all configured interfaces (IDEMPOTENT) + register: result + splunk.es.splunk_adaptive_response_notable_events: *id001 + + - name: Assert that the previous delete task was idempotent + assert: + that: + - result.changed == false + + 
always: + - include_tasks: _remove_dim_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/gathered.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/gathered.yaml new file mode 100644 index 000000000..f5003ee06 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/gathered.yaml @@ -0,0 +1,22 @@ +--- +- debug: + msg: START adaptive_response_notable_events gathered integration tests on connection={{ ansible_connection }} + +- include_tasks: _remove_dim_config.yaml + +- include_tasks: _populate_dim_config.yaml + +- block: + - name: Gather adaptive response notable events config + splunk.es.splunk_adaptive_response_notable_events: + config: + - correlation_search_name: Ansible Test + state: gathered + register: result + + - assert: + that: + - merged['after'] == result['adaptive_response_notable_events']['gathered'] + - result['changed'] == false + always: + - include_tasks: _remove_dim_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/merged.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/merged.yaml new file mode 100644 index 000000000..6c949f830 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/merged.yaml @@ -0,0 +1,82 @@ +--- +- debug: + msg: + START Merged adaptive_response_notable_events state for integration tests on connection={{ + ansible_connection }} + +- include_tasks: _remove_dim_config.yaml + +- block: + - name: create test correlation search + splunk.es.splunk_correlation_searches: + config: + - name: Ansible Test + description: test description + search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") 
as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6' + state: merged + + - name: Merge and create new adaptive response notable events configuration + tags: merged + register: result + splunk.es.splunk_adaptive_response_notable_events: &id001 + state: merged + config: + - correlation_search_name: Ansible Test + description: test notable event + drilldown_earliest_offset: $info_min_time$ + drilldown_latest_offset: $info_max_time$ + drilldown_name: test_drill_name + drilldown_search: test_drill + extract_artifacts: + asset: + - src + - dest + - dvc + - orig_host + identity: + - src_user + - user + - src_user_id + - src_user_role + - user_id + - user_role + - vendor_account + investigation_profiles: + - test profile 1 + - test profile 2 + - test profile 3 + next_steps: + - makestreams + - nbtstat + - nslookup + name: ansible_test_notable + recommended_actions: + - email + - logevent + - makestreams + - nbtstat + security_domain: threat + severity: high + + - name: Assert that task reports change and after dict is correctly generated + assert: + that: + - result['changed'] == true + - merged['after'] == result['adaptive_response_notable_events']['after'] + + - name: Assert that before dicts are correctly generated + assert: + that: + - merged['before'] == result['adaptive_response_notable_events']['before'] + + - name: Merge provided configuration with device configuration (IDEMPOTENT) + register: result + splunk.es.splunk_adaptive_response_notable_events: *id001 + + - name: Assert that the previous task was idempotent + assert: + that: + - result['changed'] == false + + always: + - include_tasks: _remove_dim_config.yaml diff --git 
a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/replaced.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/replaced.yaml new file mode 100644 index 000000000..47cf117b2 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/replaced.yaml @@ -0,0 +1,53 @@ +--- +- debug: + msg: START Replaced adaptive_response_notable_events state for integration tests on connection={{ ansible_connection }} + +- include_tasks: _remove_dim_config.yaml +- include_tasks: _populate_dim_config.yaml + +- block: + - name: Replace existing adaptive response notable events configuration + register: result + splunk.es.splunk_adaptive_response_notable_events: &id001 + state: replaced + config: + - correlation_search_name: Ansible Test + description: test notable event + drilldown_earliest_offset: $info_min_time$ + drilldown_latest_offset: $info_max_time$ + extract_artifacts: + asset: + - src + - dest + identity: + - src_user + - user + - src_user_id + next_steps: + - makestreams + name: ansible_test_notable + recommended_actions: + - email + - logevent + security_domain: threat + severity: high + + - assert: + that: + - result.changed == true + - replaced['before'] == result['adaptive_response_notable_events']['before'] + - replaced['after'] == result['adaptive_response_notable_events']['after'] + + - name: + Replaces device configuration of listed adaptive response notable events configuration with + provided configuration (IDEMPOTENT) + register: result + splunk.es.splunk_adaptive_response_notable_events: *id001 + + - name: Assert that task was idempotent + assert: + that: + - result['changed'] == false + + always: + - include_tasks: _remove_dim_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/rtt.yaml 
b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/rtt.yaml new file mode 100644 index 000000000..dfb936ff0 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/tests/rtt.yaml @@ -0,0 +1,102 @@ +--- +- debug: + msg: START adaptive_response_notable_events round trip integration tests on connection={{ ansible_connection }} + +- include_tasks: _remove_dim_config.yaml +- include_tasks: _populate_dim_config.yaml + +- block: + - name: Apply the provided configuration (base config) + register: base_config + splunk.es.splunk_adaptive_response_notable_events: &id001 + state: merged + config: + - correlation_search_name: Ansible Test + description: test notable event + drilldown_earliest_offset: $info_min_time$ + drilldown_latest_offset: $info_max_time$ + drilldown_name: test_drill_name + drilldown_search: test_drill + extract_artifacts: + asset: + - src + - dest + - dvc + - orig_host + identity: + - src_user + - user + - src_user_id + - src_user_role + - user_id + - user_role + - vendor_account + investigation_profiles: + - test profile 1 + - test profile 2 + - test profile 3 + next_steps: + - makestreams + - nbtstat + - nslookup + name: ansible_test_notable + recommended_actions: + - email + - logevent + - makestreams + - nbtstat + security_domain: threat + severity: high + + - name: Gather adaptive response notable events configuration facts + register: gather_result + splunk.es.splunk_adaptive_response_notable_events: + config: + - correlation_search_name: Ansible Test + state: gathered + + - name: Apply the configuration which need to be reverted + register: result + splunk.es.splunk_adaptive_response_notable_events: + config: + - correlation_search_name: Ansible Test + description: test notable event + drilldown_earliest_offset: $info_min_time$ + drilldown_latest_offset: $info_max_time$ + extract_artifacts: + asset: + - src + - dest + identity: + - 
src_user + - user + - src_user_id + next_steps: + - makestreams + name: ansible_test_notable + recommended_actions: + - email + - logevent + security_domain: threat + severity: high + state: replaced + + - assert: + that: + - result.changed == true + - replaced['before'] == result['adaptive_response_notable_events']['before'] + - replaced['after'] == result['adaptive_response_notable_events']['after'] + + - name: Revert back to base config using facts round trip + register: revert + splunk.es.splunk_adaptive_response_notable_events: + config: "{{ gather_result['adaptive_response_notable_events']['gathered'] }}" + state: replaced + + - assert: + that: + - revert['changed'] == true + - merged['after'] == revert['adaptive_response_notable_events']['after'] + + always: + - include_tasks: _remove_dim_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/vars/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/vars/main.yaml new file mode 100644 index 000000000..8116add0d --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_adaptive_response_notable_events/vars/main.yaml @@ -0,0 +1,101 @@ +--- +merged: + before: [] + + after: + - correlation_search_name: Ansible Test + description: test notable event + drilldown_earliest_offset: $info_min_time$ + drilldown_latest_offset: $info_max_time$ + drilldown_name: test_drill_name + drilldown_search: test_drill + extract_artifacts: + asset: + - src + - dest + - dvc + - orig_host + identity: + - src_user + - user + - src_user_id + - src_user_role + - user_id + - user_role + - vendor_account + investigation_profiles: + - test profile 1 + - test profile 2 + - test profile 3 + next_steps: + - makestreams + - nbtstat + - nslookup + name: ansible_test_notable + recommended_actions: + - email + - logevent + - makestreams + - nbtstat + security_domain: threat + severity: high + +replaced: 
+ before: + - correlation_search_name: Ansible Test + description: test notable event + drilldown_earliest_offset: $info_min_time$ + drilldown_latest_offset: $info_max_time$ + drilldown_name: test_drill_name + drilldown_search: test_drill + extract_artifacts: + asset: + - src + - dest + - dvc + - orig_host + identity: + - src_user + - user + - src_user_id + - src_user_role + - user_id + - user_role + - vendor_account + investigation_profiles: + - test profile 1 + - test profile 2 + - test profile 3 + next_steps: + - makestreams + - nbtstat + - nslookup + name: ansible_test_notable + recommended_actions: + - email + - logevent + - makestreams + - nbtstat + security_domain: threat + severity: high + after: + - correlation_search_name: Ansible Test + description: test notable event + drilldown_earliest_offset: $info_min_time$ + drilldown_latest_offset: $info_max_time$ + extract_artifacts: + asset: + - src + - dest + identity: + - src_user + - user + - src_user_id + next_steps: + - makestreams + name: ansible_test_notable + recommended_actions: + - email + - logevent + security_domain: threat + severity: high
\ No newline at end of file diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml new file mode 100644 index 000000000..39b507ff3 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_populate_config.yaml @@ -0,0 +1,38 @@ +--- +- name: merge corre config + splunk.es.splunk_correlation_searches: + config: + - name: Ansible Test + disabled: false + description: test description + app: DA-ESS-EndpointProtection + annotations: + cis20: + - test1 + mitre_attack: + - test2 + kill_chain_phases: + - test3 + nist: + - test4 + custom: + - framework: test_framework + custom_annotations: + - test5 + ui_dispatch_context: SplunkEnterpriseSecuritySuite + time_earliest: -24h + time_latest: now + cron_schedule: "*/5 * * * *" + scheduling: realtime + schedule_window: 0 + schedule_priority: default + trigger_alert: once + trigger_alert_when: number of events + trigger_alert_when_condition: greater than + trigger_alert_when_value: 10 + throttle_window_duration: 5s + throttle_fields_to_group_by: + - test_field1 + suppress_alerts: False + search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6' + state: merged
\ No newline at end of file diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml new file mode 100644 index 000000000..7707f9191 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/_remove_config.yaml @@ -0,0 +1,6 @@ +--- +- name: delete correlation search + splunk.es.splunk_correlation_searches: + config: + - name: Ansible Test + state: deleted
\ No newline at end of file diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/deleted.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/deleted.yaml new file mode 100644 index 000000000..363f0f3b5 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/deleted.yaml @@ -0,0 +1,33 @@ +--- +- debug: + msg: Start Deleted integration state for correlation_searches ansible_connection={{ ansible_connection }} + +- include_tasks: _remove_config.yaml + +- include_tasks: _populate_config.yaml + +- block: + - name: Delete correlation searches config + splunk.es.splunk_correlation_searches: &id001 + config: + - name: Ansible Test + state: deleted + register: result + + - assert: + that: + - result.changed == true + - merged['after'] == result['correlation_searches']['before'] + - merged['before'] == result['correlation_searches']['after'] + + - name: Delete attributes of all configured interfaces (IDEMPOTENT) + register: result + splunk.es.splunk_correlation_searches: *id001 + + - name: Assert that the previous delete task was idempotent + assert: + that: + - result.changed == false + + always: + - include_tasks: _remove_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/gathered.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/gathered.yaml new file mode 100644 index 000000000..f612ab397 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/gathered.yaml @@ -0,0 +1,23 @@ +--- +- debug: + msg: START correlation_searches gathered integration tests on connection={{ ansible_connection }} + +- include_tasks: _remove_config.yaml + +- include_tasks: _populate_config.yaml + +- block: + - name: Gather correlation searches config + splunk.es.splunk_correlation_searches: 
+ config: + - name: Ansible Test + - name: Ansible Test1 + state: gathered + register: result + + - assert: + that: + - merged['after'] == result['gathered'] + - result['changed'] == false + always: + - include_tasks: _remove_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/merged.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/merged.yaml new file mode 100644 index 000000000..a83d1aacf --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/merged.yaml @@ -0,0 +1,70 @@ +--- +- debug: + msg: START Merged correlation_searches state for integration tests on connection={{ + ansible_connection }} + +- include_tasks: _remove_config.yaml + +- block: + - name: Merge and create new correlation searches configuration + tags: merged + register: result + splunk.es.splunk_correlation_searches: &id001 + state: merged + config: + - name: Ansible Test + disabled: false + description: test description + app: DA-ESS-EndpointProtection + annotations: + cis20: + - test1 + mitre_attack: + - test2 + kill_chain_phases: + - test3 + nist: + - test4 + custom: + - framework: test_framework + custom_annotations: + - test5 + ui_dispatch_context: SplunkEnterpriseSecuritySuite + time_earliest: -24h + time_latest: now + cron_schedule: "*/5 * * * *" + scheduling: realtime + schedule_window: 0 + schedule_priority: default + trigger_alert: once + trigger_alert_when: number of events + trigger_alert_when_condition: greater than + trigger_alert_when_value: 10 + throttle_window_duration: 5s + throttle_fields_to_group_by: + - test_field1 + suppress_alerts: False + search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by 
"Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6' + + - name: Assert that task reports change and after dict is correctly generated + assert: + that: + - result['changed'] == true + - merged['after'] == result['correlation_searches']['after'] + + - name: Assert that before dicts are correctly generated + assert: + that: + - merged['before'] == result['correlation_searches']['before'] + + - name: Merge provided configuration with device configuration (IDEMPOTENT) + register: result + splunk.es.splunk_correlation_searches: *id001 + + - name: Assert that the previous task was idempotent + assert: + that: + - result['changed'] == false + + always: + - include_tasks: _remove_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml new file mode 100644 index 000000000..a41649a5b --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/replaced.yaml @@ -0,0 +1,72 @@ +--- +- debug: + msg: START Replaced correlation_searches state for integration tests on connection={{ ansible_connection }} + +- include_tasks: _remove_config.yaml +- include_tasks: _populate_config.yaml + +- block: + + - name: Replace existing correlation searches configuration + register: result + splunk.es.splunk_correlation_searches: &id001 + state: replaced + config: + - name: Ansible Test + disabled: false + description: test description + app: SplunkEnterpriseSecuritySuite + annotations: + cis20: + - test1 + - test2 + mitre_attack: + - test3 + - test4 + kill_chain_phases: + - test5 + - test6 + nist: + - test7 + - test8 + custom: + - framework: test_framework2 + custom_annotations: + - test9 + - test10 + ui_dispatch_context: SplunkEnterpriseSecuritySuite + time_earliest: -24h + time_latest: now + 
cron_schedule: "*/5 * * * *" + scheduling: continuous + schedule_window: auto + schedule_priority: default + trigger_alert: once + trigger_alert_when: number of events + trigger_alert_when_condition: greater than + trigger_alert_when_value: 10 + throttle_window_duration: 5s + throttle_fields_to_group_by: + - test_field1 + - test_field2 + suppress_alerts: True + search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6' + + - assert: + that: + - result.changed == true + - replaced['before'] == result['correlation_searches']['before'] + - replaced['after'] == result['correlation_searches']['after'] + + - name: Replaces device configuration of listed data inputs networks configuration with + provided configuration (IDEMPOTENT) + register: result + splunk.es.splunk_correlation_searches: *id001 + + - name: Assert that task was idempotent + assert: + that: + - result['changed'] == false + + always: + - include_tasks: _remove_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml new file mode 100644 index 000000000..151e7305a --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_correlation_searches/tests/rtt.yaml @@ -0,0 +1,118 @@ +--- +- debug: + msg: START correlation_searches round trip integration tests on connection={{ ansible_connection }} + +- include_tasks: _remove_config.yaml +- include_tasks: _populate_config.yaml + +- block: + - name: Apply the provided configuration (base config) + register: base_config + 
splunk.es.splunk_correlation_searches: &id001 + state: merged + config: + - name: Ansible Test 3 + disabled: false + description: test description + app: DA-ESS-EndpointProtection + annotations: + cis20: + - test1 + mitre_attack: + - test2 + kill_chain_phases: + - test3 + nist: + - test4 + custom: + - framework: test_framework + custom_annotations: + - test5 + ui_dispatch_context: SplunkEnterpriseSecuritySuite + time_earliest: -24h + time_latest: now + cron_schedule: "*/5 * * * *" + scheduling: realtime + schedule_window: 0 + schedule_priority: default + trigger_alert: once + trigger_alert_when: number of events + trigger_alert_when_condition: greater than + trigger_alert_when_value: 10 + throttle_window_duration: 5s + throttle_fields_to_group_by: + - test_field1 + suppress_alerts: False + search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6' + + - name: Gather correlation searches configuration facts + register: gather_result + splunk.es.splunk_correlation_searches: + config: + - name: Ansible Test + state: gathered + + - name: Apply the configuration which need to be reverted + register: result + splunk.es.splunk_correlation_searches: + config: + - name: Ansible Test + disabled: false + description: test description + app: SplunkEnterpriseSecuritySuite + annotations: + cis20: + - test1 + - test2 + mitre_attack: + - test3 + - test4 + kill_chain_phases: + - test5 + - test6 + nist: + - test7 + - test8 + custom: + - framework: test_framework2 + custom_annotations: + - test9 + - test10 + ui_dispatch_context: SplunkEnterpriseSecuritySuite + time_earliest: -24h + time_latest: now + cron_schedule: "*/5 * * * *" + scheduling: 
continuous + schedule_window: auto + schedule_priority: default + trigger_alert: once + trigger_alert_when: number of events + trigger_alert_when_condition: greater than + trigger_alert_when_value: 10 + throttle_window_duration: 5s + throttle_fields_to_group_by: + - test_field1 + - test_field2 + suppress_alerts: True + search: '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authentication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Failed_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authentication.src" as "src" | where "count">=6' + state: replaced + + - assert: + that: + - result.changed == true + - replaced['before'] == result['correlation_searches']['before'] + - replaced['after'] == result['correlation_searches']['after'] + + - name: Revert back to base config using facts round trip + register: revert + splunk.es.splunk_correlation_searches: + config: "{{ gather_result['gathered'] }}" + state: replaced + + - assert: + that: + - revert['changed'] == true + - merged['after'] == revert['correlation_searches']['after'] + + always: + - include_tasks: _remove_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml new file mode 100644 index 000000000..10c0fabcb --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/defaults/main.yaml @@ -0,0 +1,2 @@ +--- +testcase: '*' diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/meta/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/meta/main.yaml new file mode 100644 index 000000000..23d65c7ef --- /dev/null +++ 
b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/meta/main.yaml @@ -0,0 +1,2 @@ +--- +dependencies: [] diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml new file mode 100644 index 000000000..dcc81f25f --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/cli.yaml @@ -0,0 +1,18 @@ +--- +- name: collect all test cases + find: + paths: '{{ role_path }}/tests' + patterns: '{{ testcase }}.yaml' + register: test_cases + +- name: set test_items + set_fact: test_items="{{ test_cases.files | map(attribute='path') | list }}" + +- name: Run test case (connection=ansible.netcommon.httpapi) + include: '{{ test_case_to_run }}' + vars: + ansible_connection: ansible.netcommon.httpapi + with_items: '{{ test_items }}' + loop_control: + loop_var: test_case_to_run + tags: connection_httpapi diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml new file mode 100644 index 000000000..62cc1ae1e --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/main.yaml @@ -0,0 +1,7 @@ +--- +- include: cli.yaml + tags: + - cli + +- include: redirection.yaml + when: ansible_version.full is version('2.10.0', '>=') diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml new file mode 100644 index 000000000..bafc23a45 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tasks/redirection.yaml @@ -0,0 +1,6 @@ +--- +- name: collect 
all test cases + find: + paths: '{{ role_path }}/tests/redirection' + patterns: '{{ testcase }}.yaml' + register: test_cases diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml new file mode 100644 index 000000000..2bb0129a4 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_populate_dim_config.yaml @@ -0,0 +1,22 @@ +--- +- name: Populate data inputs config + splunk.es.splunk_data_inputs_monitor: + config: + - name: "/var/log" + blacklist: '/\/var\/log\/[a-z]/gm' + check_index: True + check_path: True + crc_salt: <SOURCE> + disabled: False + follow_tail: False + host: "$decideOnStartup" + host_regex: "/(test_host)/gm" + host_segment: 3 + ignore_older_than: 5d + index: default + recursive: True + rename_source: test + sourcetype: test_source_type + time_before_close: 4 + whitelist: '/\/var\/log\/[a-z]/gm' + state: merged
\ No newline at end of file diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_remove_dim_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_remove_dim_config.yaml new file mode 100644 index 000000000..d0fdb2d90 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/_remove_dim_config.yaml @@ -0,0 +1,6 @@ +--- +- name: Delete data inputs config + splunk.es.splunk_data_inputs_monitor: + config: + - name: "/var/log" + state: deleted
\ No newline at end of file diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml new file mode 100644 index 000000000..8f19b500f --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/deleted.yaml @@ -0,0 +1,36 @@ +--- +- debug: + msg: Start Deleted integration state for data_inputs_monitors ansible_connection={{ ansible_connection + }} + +- include_tasks: _remove_dim_config.yaml + +- include_tasks: _populate_dim_config.yaml + +- block: + - name: Delete data inputs monitors config + splunk.es.splunk_data_inputs_monitor: &id001 + config: + - name: /var/log + state: deleted + register: result + + - assert: + that: + - result.changed == true + - "{{ merged['after'] | dict2items |\ + symmetric_difference(result['data_inputs_monitor']['before'][0] |\ + dict2items)| length==5}}" + - merged['before'] == result['data_inputs_monitor']['after'] + + - name: Delete attributes of all configured interfaces (IDEMPOTENT) + register: result + splunk.es.splunk_data_inputs_monitor: *id001 + + - name: Assert that the previous delete task was idempotent + assert: + that: + - result.changed == false + + always: + - include_tasks: _remove_dim_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/gathered.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/gathered.yaml new file mode 100644 index 000000000..84aae2076 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/gathered.yaml @@ -0,0 +1,25 @@ +--- +- debug: + msg: START data_inputs_monitors gathered integration tests on connection={{ ansible_connection }} + +- include_tasks: _remove_dim_config.yaml + +- include_tasks: _populate_dim_config.yaml + +- block: + - 
name: Gather data inputs monitors config + splunk.es.splunk_data_inputs_monitor: + config: + - name: "/var/log" + state: gathered + register: result + + - assert: + that: + - "{{ merged['after'] | dict2items |\ + symmetric_difference(result['gathered'][0] |\ + dict2items)| length==5}}" + - result['changed'] == false + + always: + - include_tasks: _remove_dim_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml new file mode 100644 index 000000000..0388c26c1 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/merged.yaml @@ -0,0 +1,57 @@ +--- +- debug: + msg: START Merged data_inputs_monitor state for integration tests on connection={{ + ansible_connection }} + +- include_tasks: _remove_dim_config.yaml + +- block: + - name: Merge and create new data inputs monitors configuration + tags: merged + register: result + splunk.es.splunk_data_inputs_monitor: &id001 + state: merged + config: + - name: "/var/log" + blacklist: '/\/var\/log\/[a-z]/gm' + check_index: True + check_path: True + crc_salt: <SOURCE> + disabled: False + follow_tail: False + host: "$decideOnStartup" + host_regex: "/(test_host)/gm" + host_segment: 3 + ignore_older_than: 5d + index: default + recursive: True + rename_source: test + sourcetype: test_source_type + time_before_close: 4 + whitelist: '/\/var\/log\/[a-z]/gm' + + - name: Assert that task reports change and after dict is correctly generated + assert: + that: + - result['changed'] == true + - "{{ merged['after'] | dict2items |\ + symmetric_difference(result['data_inputs_monitor']['after'][0] |\ + dict2items)| length==5}}" + + - name: Assert that before dicts are correctly generated + assert: + that: + - merged['before'] == result['data_inputs_monitor']['before'] + + - name: Merge provided configuration with device 
configuration (IDEMPOTENT) + register: result + splunk.es.splunk_data_inputs_monitor: *id001 + + - name: Assert that the previous task was idempotent + assert: + that: + - result['changed'] == false + + always: + + - include_tasks: _remove_dim_config.yaml
\ No newline at end of file diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml new file mode 100644 index 000000000..7a9dd8c46 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/replaced.yaml @@ -0,0 +1,43 @@ +--- +- debug: + msg: START Replaced data_inputs_monitor state for integration tests on connection={{ ansible_connection + }} + +- include_tasks: _remove_dim_config.yaml +- include_tasks: _populate_dim_config.yaml + +- block: + + - name: Replace existing data inputs monitors configuration + register: result + splunk.es.splunk_data_inputs_monitor: &id001 + state: replaced + config: + - name: "/var/log" + blacklist: '/\/var\/log\/[a-z0-9]/gm' + crc_salt: <SOURCE> + + + - assert: + that: + - result.changed == true + - "{{ replaced['before'] | dict2items |\ + symmetric_difference(result['data_inputs_monitor']['before'][0] |\ + dict2items) | length==5}}" + - "{{ replaced['after'] | dict2items |\ + symmetric_difference(result['data_inputs_monitor']['after'][0] |\ + dict2items) | length==3}}" + + - name: Replaces device configuration of listed data inputs networks configuration with + provided configuration (IDEMPOTENT) + register: result + splunk.es.splunk_data_inputs_monitor: *id001 + + - name: Assert that task was idempotent + assert: + that: + - result['changed'] == false + + always: + + - include_tasks: _remove_dim_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml new file mode 100644 index 000000000..4025c446c --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/tests/rtt.yaml @@ -0,0 +1,73 @@ +--- +- debug: + msg: 
START data_inputs_monitor round trip integration tests on connection={{ ansible_connection }} + +- include_tasks: _remove_dim_config.yaml +- include_tasks: _populate_dim_config.yaml + +- block: + + - name: Apply the provided configuration (base config) + register: base_config + splunk.es.splunk_data_inputs_monitor: &id001 + state: merged + config: + - name: "/var/log" + blacklist: '/\/var\/log\/[a-z]/gm' + check_index: True + check_path: True + crc_salt: <SOURCE> + disabled: False + follow_tail: False + host: "$decideOnStartup" + host_regex: "/(test_host)/gm" + host_segment: 3 + ignore_older_than: 5d + index: default + recursive: True + rename_source: test + sourcetype: test_source_type + time_before_close: 4 + whitelist: '/\/var\/log\/[a-z]/gm' + + - name: Gather data inputs monitors configuration facts + register: gather_result + splunk.es.splunk_data_inputs_monitor: + config: + - name: "/var/log" + state: gathered + + - name: Apply the configuration which need to be reverted + register: result + splunk.es.splunk_data_inputs_monitor: + config: + - name: "/var/log" + blacklist: '/\/var\/log\/[a-z0-9]/gm' + crc_salt: <SOURCE> + state: replaced + + - assert: + that: + - result.changed == true + - "{{ replaced['before'] | dict2items |\ + symmetric_difference(result['data_inputs_monitor']['before'][0] |\ + dict2items) | length==5}}" + - "{{ replaced['after'] | dict2items |\ + symmetric_difference(result['data_inputs_monitor']['after'][0] |\ + dict2items) | length==3}}" + + - name: Revert back to base config using facts round trip + register: revert + splunk.es.splunk_data_inputs_monitor: + config: "{{ gather_result['gathered'] }}" + state: replaced + + - assert: + that: + - revert['changed'] == true + - "{{ merged['after'] | dict2items |\ + symmetric_difference(revert['data_inputs_monitor']['after'][0] |\ + dict2items)| length==5}}" + + always: + - include_tasks: _remove_dim_config.yaml diff --git 
a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml new file mode 100644 index 000000000..881a750b4 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_monitor/vars/main.yaml @@ -0,0 +1,46 @@ +--- +merged: + before: [] + + after: + name: "/var/log" + blacklist: '/\/var\/log\/[a-z]/gm' + check_index: True + check_path: True + crc_salt: <SOURCE> + disabled: False + follow_tail: False + host: "$decideOnStartup" + host_regex: "/(test_host)/gm" + host_segment: 3 + ignore_older_than: 5d + index: default + recursive: True + rename_source: test + sourcetype: test_source_type + time_before_close: + whitelist: '/\/var\/log\/[a-z]/gm' + +replaced: + before: + name: "/var/log" + blacklist: '/\/var\/log\/[a-z]/gm' + check_index: True + check_path: True + crc_salt: <SOURCE> + disabled: False + follow_tail: False + host: "$decideOnStartup" + host_regex: "/(test_host)/gm" + host_segment: 3 + ignore_older_than: 5d + index: default + recursive: True + rename_source: test + sourcetype: test_source_type + time_before_close: + whitelist: '/\/var\/log\/[a-z]/gm' + after: + name: "/var/log" + blacklist: '/\/var\/log\/[a-z0-9]/gm' + crc_salt: <SOURCE>
\ No newline at end of file diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml new file mode 100644 index 000000000..10c0fabcb --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/defaults/main.yaml @@ -0,0 +1,2 @@ +--- +testcase: '*' diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/meta/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/meta/main.yaml new file mode 100644 index 000000000..23d65c7ef --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/meta/main.yaml @@ -0,0 +1,2 @@ +--- +dependencies: [] diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml new file mode 100644 index 000000000..dcc81f25f --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/cli.yaml @@ -0,0 +1,18 @@ +--- +- name: collect all test cases + find: + paths: '{{ role_path }}/tests' + patterns: '{{ testcase }}.yaml' + register: test_cases + +- name: set test_items + set_fact: test_items="{{ test_cases.files | map(attribute='path') | list }}" + +- name: Run test case (connection=ansible.netcommon.httpapi) + include: '{{ test_case_to_run }}' + vars: + ansible_connection: ansible.netcommon.httpapi + with_items: '{{ test_items }}' + loop_control: + loop_var: test_case_to_run + tags: connection_httpapi diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml new file mode 100644 
index 000000000..62cc1ae1e --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/main.yaml @@ -0,0 +1,7 @@ +--- +- include: cli.yaml + tags: + - cli + +- include: redirection.yaml + when: ansible_version.full is version('2.10.0', '>=') diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml new file mode 100644 index 000000000..bafc23a45 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tasks/redirection.yaml @@ -0,0 +1,6 @@ +--- +- name: collect all test cases + find: + paths: '{{ role_path }}/tests/redirection' + patterns: '{{ testcase }}.yaml' + register: test_cases diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml new file mode 100644 index 000000000..60f87afbf --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_populate_din_config.yaml @@ -0,0 +1,43 @@ +--- +- name: merge data inputs config + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: raw + name: 8100 + connection_host: ip + disabled: True + host: "$decideOnStartup" + index: default + queue: parsingQueue + raw_tcp_done_timeout: 9 + restrict_to_host: default + source: test_source + sourcetype: test_source_type + - protocol: tcp + datatype: cooked + name: 8101 + connection_host: ip + disabled: False + host: "$decideOnStartup" + restrict_to_host: default + - protocol: tcp + datatype: splunktcptoken + name: test_token + token: "01234567-0123-0123-0123-012345678901" + - protocol: tcp + datatype: ssl + name: test_host + - protocol: udp + name: 7890 + 
connection_host: ip + disabled: True + host: "$decideOnStartup" + index: default + no_appending_timestamp: True + no_priority_stripping: True + queue: parsingQueue + restrict_to_host: default + source: test_source + sourcetype: test_source_type + state: merged
\ No newline at end of file diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml new file mode 100644 index 000000000..bf904c27d --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/_remove_din_config.yaml @@ -0,0 +1,16 @@ +--- +- name: delete data inputs config + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: raw + name: default:8100 + - protocol: tcp + datatype: cooked + name: default:8101 + - protocol: tcp + datatype: splunktcptoken + name: test_token + - protocol: udp + name: default:7890 + state: deleted
\ No newline at end of file diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/deleted.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/deleted.yaml new file mode 100644 index 000000000..08974bab5 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/deleted.yaml @@ -0,0 +1,47 @@ +--- +- debug: + msg: + Start Deleted integration state for data_inputs_network ansible_connection={{ ansible_connection + }} + +- include_tasks: _remove_din_config.yaml + +- include_tasks: _populate_din_config.yaml + +- block: + - name: Delete data inputs networks config + splunk.es.splunk_data_inputs_network: &id001 + config: + - protocol: tcp + datatype: raw + name: default:8100 + - protocol: tcp + datatype: cooked + name: default:8101 + - protocol: tcp + datatype: splunktcptoken + name: test_token + token: "01234567-0123-0123-0123-012345678901" + - protocol: udp + name: default:7890 + state: deleted + register: result + + - assert: + that: + - result.changed == true + - "{{ merged['after'] | symmetric_difference(result['data_inputs_network']['before']) |\ + \ length == 1 }}" + - merged['before'] == result['data_inputs_network']['after'] + + - name: Delete attributes of all configured interfaces (IDEMPOTENT) + register: result + splunk.es.splunk_data_inputs_network: *id001 + + - name: Assert that the previous delete task was idempotent + assert: + that: + - result.changed == false + + always: + - include_tasks: _remove_din_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/gathered.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/gathered.yaml new file mode 100644 index 000000000..252ddc7df --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/gathered.yaml @@ -0,0 +1,38 @@ 
+--- +- debug: + msg: + START data_inputs_network gathered integration tests on connection={{ ansible_connection + }} + +- include_tasks: _remove_din_config.yaml + +- include_tasks: _populate_din_config.yaml + +- block: + - name: Gather data inputs networks config + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: raw + name: default:8100 + - protocol: tcp + datatype: cooked + name: default:8101 + - protocol: tcp + datatype: splunktcptoken + name: test_token + - protocol: tcp + datatype: ssl + name: test_host + - protocol: udp + name: default:7890 + state: gathered + register: result + + - assert: + that: + - "{{ merged['after'] | symmetric_difference(result['gathered']) |\ + \ length == 0 }}" + - result['changed'] == false + always: + - include_tasks: _remove_din_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml new file mode 100644 index 000000000..842524ec6 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/merged.yaml @@ -0,0 +1,77 @@ +--- +- debug: + msg: + START Merged data_inputs_network state for integration tests on connection={{ + ansible_connection }} + +- include_tasks: _remove_din_config.yaml + +- block: + - name: Merge and create new data inputs networks configuration + tags: merged + register: result + splunk.es.splunk_data_inputs_network: &id001 + state: merged + config: + - protocol: tcp + datatype: raw + name: 8100 + connection_host: ip + disabled: True + host: "$decideOnStartup" + index: default + queue: parsingQueue + raw_tcp_done_timeout: 9 + restrict_to_host: default + source: test_source + sourcetype: test_source_type + - protocol: tcp + datatype: cooked + name: 8101 + connection_host: ip + disabled: False + host: "$decideOnStartup" + restrict_to_host: default + - protocol: tcp + 
datatype: splunktcptoken + name: test_token + token: 01234567-0123-0123-0123-012345678901 + - protocol: tcp + datatype: ssl + name: test_host + - protocol: udp + name: 7890 + connection_host: ip + disabled: True + host: "$decideOnStartup" + index: default + no_appending_timestamp: True + no_priority_stripping: True + queue: parsingQueue + restrict_to_host: default + source: test_source + sourcetype: test_source_type + + - name: Assert that task reports change and after dict is correctly generated + assert: + that: + - result['changed'] == true + - "{{ merged['after'] | symmetric_difference(result['data_inputs_network']['after']) |\ + \ length == 0 }}" + + - name: Assert that before dicts are correctly generated + assert: + that: + - merged['before_merged'] == result['data_inputs_network']['before'] + + - name: Merge provided configuration with device configuration (IDEMPOTENT) + register: result + splunk.es.splunk_data_inputs_network: *id001 + + - name: Assert that the previous task was idempotent + assert: + that: + - result['changed'] == false + + always: + - include_tasks: _remove_din_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml new file mode 100644 index 000000000..340df5282 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/replaced.yaml @@ -0,0 +1,72 @@ +--- +- debug: + msg: + START Replaced data_inputs_network state for integration tests on connection={{ ansible_connection + }} + +- include_tasks: _remove_din_config.yaml +- include_tasks: _populate_din_config.yaml + +- block: + - name: Replace existing data inputs networks configuration + register: result + splunk.es.splunk_data_inputs_network: &id001 + state: replaced + config: + - protocol: tcp + datatype: raw + name: 8100 + connection_host: ip + disabled: True + 
host: "$decideOnStartup" + index: default + queue: parsingQueue + raw_tcp_done_timeout: 10 + restrict_to_host: default + source: test_source + sourcetype: test_source_type + - protocol: tcp + datatype: cooked + name: 8101 + connection_host: ip + disabled: True + host: "$decideOnStartup" + restrict_to_host: default + - protocol: tcp + datatype: splunktcptoken + name: test_token + token: 01234567-0123-0123-0123-012345678900 + - protocol: udp + name: 7890 + connection_host: ip + disabled: True + host: "$decideOnStartup" + index: default + no_appending_timestamp: False + no_priority_stripping: False + queue: parsingQueue + restrict_to_host: default + source: test_source + sourcetype: test_source_type + + - assert: + that: + - result.changed == true + - "{{ replaced['before'] | symmetric_difference(result['data_inputs_network']['before']) |\ + \ length == 0 }}" + - "{{ replaced['after'] | symmetric_difference(result['data_inputs_network']['after']) |\ + \ length == 0 }}" + + - name: + Replaces device configuration of listed data inputs networks configuration with + provided configuration (IDEMPOTENT) + register: result + splunk.es.splunk_data_inputs_network: *id001 + + - name: Assert that task was idempotent + assert: + that: + - result['changed'] == false + + always: + - include_tasks: _remove_din_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml new file mode 100644 index 000000000..1fa3e577c --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/tests/rtt.yaml @@ -0,0 +1,131 @@ +--- +- debug: + msg: + START data_inputs_network round trip integration tests on connection={{ ansible_connection + }} + +- include_tasks: _remove_din_config.yaml +- include_tasks: _populate_din_config.yaml + +- block: + - name: Apply the provided configuration (base config) 
+ register: base_config + splunk.es.splunk_data_inputs_network: &id001 + state: merged + config: + - protocol: tcp + datatype: raw + name: 8100 + connection_host: ip + disabled: True + host: "$decideOnStartup" + index: default + queue: parsingQueue + raw_tcp_done_timeout: 9 + restrict_to_host: default + source: test_source + sourcetype: test_source_type + - protocol: tcp + datatype: cooked + name: 8101 + connection_host: ip + disabled: False + host: "$decideOnStartup" + restrict_to_host: default + - protocol: tcp + datatype: splunktcptoken + name: test_token + token: 01234567-0123-0123-0123-012345678901 + - protocol: udp + name: 7890 + connection_host: ip + disabled: True + host: "$decideOnStartup" + index: default + no_appending_timestamp: True + no_priority_stripping: True + queue: parsingQueue + restrict_to_host: default + source: test_source + sourcetype: test_source_type + + - name: Gather data inputs networks configuration facts + register: gather_result + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: raw + name: default:8100 + - protocol: tcp + datatype: cooked + name: default:8101 + - protocol: tcp + datatype: splunktcptoken + name: test_token + - protocol: udp + name: default:7890 + state: gathered + + - name: Apply the configuration which need to be reverted + register: result + splunk.es.splunk_data_inputs_network: + config: + - protocol: tcp + datatype: raw + name: 8100 + connection_host: ip + disabled: True + host: "$decideOnStartup" + index: default + queue: parsingQueue + raw_tcp_done_timeout: 10 + restrict_to_host: default + source: test_source + sourcetype: test_source_type + - protocol: tcp + datatype: cooked + name: 8101 + connection_host: ip + disabled: True + host: "$decideOnStartup" + restrict_to_host: default + - protocol: tcp + datatype: splunktcptoken + name: test_token + token: 01234567-0123-0123-0123-012345678900 + - protocol: udp + name: 7890 + connection_host: ip + disabled: True + host: 
"$decideOnStartup" + index: default + no_appending_timestamp: False + no_priority_stripping: False + queue: parsingQueue + restrict_to_host: default + source: test_source + sourcetype: test_source_type + state: replaced + + - assert: + that: + - result.changed == true + - "{{ replaced['before'] | symmetric_difference(result['data_inputs_network']['before']) |\ + \ length == 0 }}" + - "{{ replaced['after'] | symmetric_difference(result['data_inputs_network']['after']) |\ + \ length == 0 }}" + + - name: Revert back to base config using facts round trip + register: revert + splunk.es.splunk_data_inputs_network: + config: "{{ gather_result['gathered'] }}" + state: replaced + + - assert: + that: + - revert['changed'] == true + - "{{ merged['after'] | symmetric_difference(revert['data_inputs_network']['after']) |\ + \ length == 1 }}" + + always: + - include_tasks: _remove_din_config.yaml diff --git a/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/vars/main.yaml b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/vars/main.yaml new file mode 100644 index 000000000..942b75851 --- /dev/null +++ b/ansible_collections/splunk/es/tests/integration/targets/splunk_data_inputs_network/vars/main.yaml @@ -0,0 +1,129 @@ +--- +merged: + before: [] + before_merged: + - cipher_suite: ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256 + datatype: ssl + disabled: true + host: $decideOnStartup + index: default + name: test_host + protocol: tcp + + after: + - protocol: tcp + datatype: raw + name: default:8100 + connection_host: ip + disabled: True + host: $decideOnStartup + index: default + queue: parsingQueue + raw_tcp_done_timeout: 9 + restrict_to_host: default + source: test_source + sourcetype: test_source_type + - protocol: tcp + datatype: cooked + name: 
default:8101 + connection_host: ip + disabled: False + host: $decideOnStartup + restrict_to_host: default + - protocol: tcp + datatype: splunktcptoken + name: splunktcptoken://test_token + token: 01234567-0123-0123-0123-012345678901 + - protocol: tcp + datatype: ssl + name: test_host + cipher_suite: ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256 + disabled: true + host: $decideOnStartup + index: default + - protocol: udp + name: default:7890 + connection_host: ip + disabled: True + host: $decideOnStartup + index: default + no_appending_timestamp: True + no_priority_stripping: True + queue: parsingQueue + restrict_to_host: default + source: test_source + sourcetype: test_source_type + +replaced: + before: + - protocol: tcp + datatype: raw + name: default:8100 + connection_host: ip + disabled: True + host: $decideOnStartup + index: default + queue: parsingQueue + raw_tcp_done_timeout: 9 + restrict_to_host: default + source: test_source + sourcetype: test_source_type + - protocol: tcp + datatype: cooked + name: default:8101 + connection_host: ip + disabled: False + host: $decideOnStartup + restrict_to_host: default + - protocol: tcp + datatype: splunktcptoken + name: splunktcptoken://test_token + token: 01234567-0123-0123-0123-012345678901 + - protocol: udp + name: default:7890 + connection_host: ip + disabled: True + host: $decideOnStartup + index: default + no_appending_timestamp: True + no_priority_stripping: True + queue: parsingQueue + restrict_to_host: default + source: test_source + sourcetype: test_source_type + after: + - protocol: tcp + datatype: raw + name: default:8100 + connection_host: ip + disabled: True + host: $decideOnStartup + index: default + queue: parsingQueue + raw_tcp_done_timeout: 10 + restrict_to_host: default + source: test_source + sourcetype: test_source_type + - protocol: 
tcp + datatype: cooked + name: default:8101 + connection_host: ip + disabled: True + host: $decideOnStartup + restrict_to_host: default + - protocol: tcp + datatype: splunktcptoken + name: splunktcptoken://test_token + token: 01234567-0123-0123-0123-012345678900 + - protocol: udp + name: default:7890 + connection_host: ip + disabled: True + host: $decideOnStartup + index: default + no_appending_timestamp: False + no_priority_stripping: False + queue: parsingQueue + restrict_to_host: default + source: test_source + sourcetype: test_source_type diff --git a/ansible_collections/splunk/es/tests/sanity/ignore-2.10.txt b/ansible_collections/splunk/es/tests/sanity/ignore-2.10.txt new file mode 100644 index 000000000..16b4372f7 --- /dev/null +++ b/ansible_collections/splunk/es/tests/sanity/ignore-2.10.txt @@ -0,0 +1 @@ +plugins/action/splunk_correlation_searches.py compile-2.6!skip diff --git a/ansible_collections/splunk/es/tests/sanity/ignore-2.11.txt b/ansible_collections/splunk/es/tests/sanity/ignore-2.11.txt new file mode 100644 index 000000000..16b4372f7 --- /dev/null +++ b/ansible_collections/splunk/es/tests/sanity/ignore-2.11.txt @@ -0,0 +1 @@ +plugins/action/splunk_correlation_searches.py compile-2.6!skip diff --git a/ansible_collections/splunk/es/tests/sanity/ignore-2.9.txt b/ansible_collections/splunk/es/tests/sanity/ignore-2.9.txt new file mode 100644 index 000000000..ed0da94eb --- /dev/null +++ b/ansible_collections/splunk/es/tests/sanity/ignore-2.9.txt @@ -0,0 +1,9 @@ +plugins/action/splunk_correlation_searches.py compile-2.6!skip +plugins/modules/correlation_search.py validate-modules:deprecation-mismatch +plugins/modules/correlation_search.py validate-modules:invalid-documentation +plugins/modules/data_input_monitor.py validate-modules:deprecation-mismatch +plugins/modules/data_input_monitor.py validate-modules:invalid-documentation +plugins/modules/data_input_network.py validate-modules:deprecation-mismatch +plugins/modules/data_input_network.py 
validate-modules:invalid-documentation +plugins/modules/adaptive_response_notable_event.py validate-modules:deprecation-mismatch +plugins/modules/adaptive_response_notable_event.py validate-modules:invalid-documentation
\ No newline at end of file diff --git a/ansible_collections/splunk/es/tests/unit/__init__.py b/ansible_collections/splunk/es/tests/unit/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/__init__.py diff --git a/ansible_collections/splunk/es/tests/unit/compat/__init__.py b/ansible_collections/splunk/es/tests/unit/compat/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/compat/__init__.py diff --git a/ansible_collections/splunk/es/tests/unit/compat/builtins.py b/ansible_collections/splunk/es/tests/unit/compat/builtins.py new file mode 100644 index 000000000..bfc8adfbe --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/compat/builtins.py @@ -0,0 +1,34 @@ +# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. 
+ +# Make coding more python3-ish +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +# +# Compat for python2.7 +# + +# One unittest needs to import builtins via __import__() so we need to have +# the string that represents it +try: + import __builtin__ +except ImportError: + BUILTINS = "builtins" +else: + BUILTINS = "__builtin__" diff --git a/ansible_collections/splunk/es/tests/unit/compat/mock.py b/ansible_collections/splunk/es/tests/unit/compat/mock.py new file mode 100644 index 000000000..2ea98a17f --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/compat/mock.py @@ -0,0 +1,128 @@ +# pylint: skip-file +# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +""" +Compat module for Python3.x's unittest.mock module +""" +import sys + +# Python 2.7 + +# Note: Could use the pypi mock library on python3.x as well as python2.x. 
It +# is the same as the python3 stdlib mock library + +try: + # Allow wildcard import because we really do want to import all of mock's + # symbols into this compat shim + # pylint: disable=wildcard-import,unused-wildcard-import + from unittest.mock import * +except ImportError: + # Python 2 + # pylint: disable=wildcard-import,unused-wildcard-import + try: + from mock import * + except ImportError: + print("You need the mock library installed on python2.x to run tests") + + +# Prior to 3.4.4, mock_open cannot handle binary read_data +if sys.version_info >= (3,) and sys.version_info < (3, 4, 4): + file_spec = None + + def _iterate_read_data(read_data): + # Helper for mock_open: + # Retrieve lines from read_data via a generator so that separate calls to + # readline, read, and readlines are properly interleaved + sep = b"\n" if isinstance(read_data, bytes) else "\n" + data_as_list = [l + sep for l in read_data.split(sep)] + + if data_as_list[-1] == sep: + # If the last line ended in a newline, the list comprehension will have an + # extra entry that's just a newline. Remove this. + data_as_list = data_as_list[:-1] + else: + # If there wasn't an extra newline by itself, then the file being + # emulated doesn't have a newline to end the last line remove the + # newline that our naive format() added + data_as_list[-1] = data_as_list[-1][:-1] + + for line in data_as_list: + yield line + + def mock_open(mock=None, read_data=""): + """ + A helper function to create a mock to replace the use of `open`. It works + for `open` called directly or used as a context manager. + + The `mock` argument is the mock object to configure. If `None` (the + default) then a `MagicMock` will be created for you, with the API limited + to methods or attributes available on standard file handles. + + `read_data` is a string for the `read` methoddline`, and `readlines` of the + file handle to return. This is an empty string by default. 
+ """ + + def _readlines_side_effect(*args, **kwargs): + if handle.readlines.return_value is not None: + return handle.readlines.return_value + return list(_data) + + def _read_side_effect(*args, **kwargs): + if handle.read.return_value is not None: + return handle.read.return_value + return type(read_data)().join(_data) + + def _readline_side_effect(): + if handle.readline.return_value is not None: + while True: + yield handle.readline.return_value + for line in _data: + yield line + + global file_spec + if file_spec is None: + import _io + + file_spec = list( + set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))) + ) + + if mock is None: + mock = MagicMock(name="open", spec=open) + + handle = MagicMock(spec=file_spec) + handle.__enter__.return_value = handle + + _data = _iterate_read_data(read_data) + + handle.write.return_value = None + handle.read.return_value = None + handle.readline.return_value = None + handle.readlines.return_value = None + + handle.read.side_effect = _read_side_effect + handle.readline.side_effect = _readline_side_effect() + handle.readlines.side_effect = _readlines_side_effect + + mock.return_value = handle + return mock diff --git a/ansible_collections/splunk/es/tests/unit/compat/unittest.py b/ansible_collections/splunk/es/tests/unit/compat/unittest.py new file mode 100644 index 000000000..df3379b82 --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/compat/unittest.py @@ -0,0 +1,39 @@ +# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +""" +Compat module for Python2.7's unittest module +""" + +import sys + +# Allow wildcard import because we really do want to import all of +# unittests's symbols into this compat shim +# pylint: disable=wildcard-import,unused-wildcard-import +if sys.version_info < (2, 7): + try: + # Need unittest2 on python2.6 + from unittest2 import * + except ImportError: + print("You need unittest2 installed on python2.6.x to run tests") +else: + from unittest import * diff --git a/ansible_collections/splunk/es/tests/unit/mock/__init__.py b/ansible_collections/splunk/es/tests/unit/mock/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/mock/__init__.py diff --git a/ansible_collections/splunk/es/tests/unit/mock/loader.py b/ansible_collections/splunk/es/tests/unit/mock/loader.py new file mode 100644 index 000000000..19c44a7e8 --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/mock/loader.py @@ -0,0 +1,116 @@ +# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. 
If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import os + +from ansible.errors import AnsibleParserError +from ansible.parsing.dataloader import DataLoader +from ansible.module_utils._text import to_bytes, to_text + + +class DictDataLoader(DataLoader): + def __init__(self, file_mapping=None): + file_mapping = {} if file_mapping is None else file_mapping + assert type(file_mapping) == dict + + super(DictDataLoader, self).__init__() + + self._file_mapping = file_mapping + self._build_known_directories() + self._vault_secrets = None + + def load_from_file(self, path, cache=True, unsafe=False): + path = to_text(path) + if path in self._file_mapping: + return self.load(self._file_mapping[path], path) + return None + + # TODO: the real _get_file_contents returns a bytestring, so we actually convert the + # unicode/text it's created with to utf-8 + def _get_file_contents(self, file_name): + path = to_text(file_name) + if path in self._file_mapping: + return (to_bytes(self._file_mapping[path]), False) + else: + raise AnsibleParserError("file not found: %s" % path) + + def path_exists(self, path): + path = to_text(path) + return path in self._file_mapping or path in self._known_directories + + def is_file(self, path): + path = to_text(path) + return path in self._file_mapping + + def is_directory(self, path): + path = to_text(path) + return path in self._known_directories + + def list_directory(self, path): + ret = [] + path = to_text(path) + for x in list(self._file_mapping.keys()) + self._known_directories: + if x.startswith(path): + if os.path.dirname(x) == path: + ret.append(os.path.basename(x)) + return ret + + def is_executable(self, path): + # FIXME: figure out a way to make paths return true for this + return False + + def _add_known_directory(self, directory): + if directory not in self._known_directories: + 
self._known_directories.append(directory) + + def _build_known_directories(self): + self._known_directories = [] + for path in self._file_mapping: + dirname = os.path.dirname(path) + while dirname not in ("/", ""): + self._add_known_directory(dirname) + dirname = os.path.dirname(dirname) + + def push(self, path, content): + rebuild_dirs = False + if path not in self._file_mapping: + rebuild_dirs = True + + self._file_mapping[path] = content + + if rebuild_dirs: + self._build_known_directories() + + def pop(self, path): + if path in self._file_mapping: + del self._file_mapping[path] + self._build_known_directories() + + def clear(self): + self._file_mapping = dict() + self._known_directories = [] + + def get_basedir(self): + return os.getcwd() + + def set_vault_secrets(self, vault_secrets): + self._vault_secrets = vault_secrets diff --git a/ansible_collections/splunk/es/tests/unit/mock/path.py b/ansible_collections/splunk/es/tests/unit/mock/path.py new file mode 100644 index 000000000..1e5902864 --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/mock/path.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +from ansible_collections.trendmicro.deepsec.tests.unit.compat.mock import ( + MagicMock, +) +from ansible.utils.path import unfrackpath + + +mock_unfrackpath_noop = MagicMock( + spec_set=unfrackpath, side_effect=lambda x, *args, **kwargs: x +) diff --git a/ansible_collections/splunk/es/tests/unit/mock/procenv.py b/ansible_collections/splunk/es/tests/unit/mock/procenv.py new file mode 100644 index 000000000..f7ab5fe91 --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/mock/procenv.py @@ -0,0 +1,94 @@ +# (c) 2016, Matt Davis <mdavis@ansible.com> +# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free 
Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import sys +import json + +from contextlib import contextmanager +from io import BytesIO, StringIO +from ansible_collections.trendmicro.deepsec.tests.unit.compat import unittest +from ansible.module_utils.six import PY3 +from ansible.module_utils._text import to_bytes + + +@contextmanager +def swap_stdin_and_argv(stdin_data="", argv_data=tuple()): + """ + context manager that temporarily masks the test runner's values for stdin and argv + """ + real_stdin = sys.stdin + real_argv = sys.argv + + if PY3: + fake_stream = StringIO(stdin_data) + fake_stream.buffer = BytesIO(to_bytes(stdin_data)) + else: + fake_stream = BytesIO(to_bytes(stdin_data)) + + try: + sys.stdin = fake_stream + sys.argv = argv_data + + yield + finally: + sys.stdin = real_stdin + sys.argv = real_argv + + +@contextmanager +def swap_stdout(): + """ + context manager that temporarily replaces stdout for tests that need to verify output + """ + old_stdout = sys.stdout + + if PY3: + fake_stream = StringIO() + else: + fake_stream = BytesIO() + + try: + sys.stdout = fake_stream + + yield fake_stream + finally: + sys.stdout = old_stdout + + +class ModuleTestCase(unittest.TestCase): + def setUp(self, module_args=None): + if module_args is None: + module_args = { + "_ansible_remote_tmp": "/tmp", + "_ansible_keep_remote_files": False, + } + + args = json.dumps(dict(ANSIBLE_MODULE_ARGS=module_args)) + + # unittest 
doesn't have a clean place to use a context manager, so we have to enter/exit manually + self.stdin_swap = swap_stdin_and_argv(stdin_data=args) + self.stdin_swap.__enter__() + + def tearDown(self): + # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually + self.stdin_swap.__exit__(None, None, None) diff --git a/ansible_collections/splunk/es/tests/unit/mock/vault_helper.py b/ansible_collections/splunk/es/tests/unit/mock/vault_helper.py new file mode 100644 index 000000000..b34ae1340 --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/mock/vault_helper.py @@ -0,0 +1,42 @@ +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible.module_utils._text import to_bytes + +from ansible.parsing.vault import VaultSecret + + +class TextVaultSecret(VaultSecret): + """A secret piece of text. ie, a password. Tracks text encoding. 
+ + The text encoding of the text may not be the default text encoding so + we keep track of the encoding so we encode it to the same bytes.""" + + def __init__(self, text, encoding=None, errors=None, _bytes=None): + super(TextVaultSecret, self).__init__() + self.text = text + self.encoding = encoding or "utf-8" + self._bytes = _bytes + self.errors = errors or "strict" + + @property + def bytes(self): + """The text encoded with encoding, unless we specifically set _bytes.""" + return self._bytes or to_bytes( + self.text, encoding=self.encoding, errors=self.errors + ) diff --git a/ansible_collections/splunk/es/tests/unit/mock/yaml_helper.py b/ansible_collections/splunk/es/tests/unit/mock/yaml_helper.py new file mode 100644 index 000000000..5df30aaed --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/mock/yaml_helper.py @@ -0,0 +1,167 @@ +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import io +import yaml + +from ansible.module_utils.six import PY3 +from ansible.parsing.yaml.loader import AnsibleLoader +from ansible.parsing.yaml.dumper import AnsibleDumper + + +class YamlTestUtils(object): + """Mixin class to combine with a unittest.TestCase subclass.""" + + def _loader(self, stream): + """Vault related tests will want to override this. 
+ + Vault cases should setup a AnsibleLoader that has the vault password.""" + return AnsibleLoader(stream) + + def _dump_stream(self, obj, stream, dumper=None): + """Dump to a py2-unicode or py3-string stream.""" + if PY3: + return yaml.dump(obj, stream, Dumper=dumper) + else: + return yaml.dump(obj, stream, Dumper=dumper, encoding=None) + + def _dump_string(self, obj, dumper=None): + """Dump to a py2-unicode or py3-string""" + if PY3: + return yaml.dump(obj, Dumper=dumper) + else: + return yaml.dump(obj, Dumper=dumper, encoding=None) + + def _dump_load_cycle(self, obj): + # Each pass though a dump or load revs the 'generation' + # obj to yaml string + string_from_object_dump = self._dump_string(obj, dumper=AnsibleDumper) + + # wrap a stream/file like StringIO around that yaml + stream_from_object_dump = io.StringIO(string_from_object_dump) + loader = self._loader(stream_from_object_dump) + # load the yaml stream to create a new instance of the object (gen 2) + obj_2 = loader.get_data() + + # dump the gen 2 objects directory to strings + string_from_object_dump_2 = self._dump_string( + obj_2, dumper=AnsibleDumper + ) + + # The gen 1 and gen 2 yaml strings + self.assertEqual(string_from_object_dump, string_from_object_dump_2) + # the gen 1 (orig) and gen 2 py object + self.assertEqual(obj, obj_2) + + # again! gen 3... load strings into py objects + stream_3 = io.StringIO(string_from_object_dump_2) + loader_3 = self._loader(stream_3) + obj_3 = loader_3.get_data() + + string_from_object_dump_3 = self._dump_string( + obj_3, dumper=AnsibleDumper + ) + + self.assertEqual(obj, obj_3) + # should be transitive, but... 
+ self.assertEqual(obj_2, obj_3) + self.assertEqual(string_from_object_dump, string_from_object_dump_3) + + def _old_dump_load_cycle(self, obj): + """Dump the passed in object to yaml, load it back up, dump again, compare.""" + stream = io.StringIO() + + yaml_string = self._dump_string(obj, dumper=AnsibleDumper) + self._dump_stream(obj, stream, dumper=AnsibleDumper) + + yaml_string_from_stream = stream.getvalue() + + # reset stream + stream.seek(0) + + loader = self._loader(stream) + # loader = AnsibleLoader(stream, vault_password=self.vault_password) + obj_from_stream = loader.get_data() + + stream_from_string = io.StringIO(yaml_string) + loader2 = self._loader(stream_from_string) + # loader2 = AnsibleLoader(stream_from_string, vault_password=self.vault_password) + obj_from_string = loader2.get_data() + + stream_obj_from_stream = io.StringIO() + stream_obj_from_string = io.StringIO() + + if PY3: + yaml.dump( + obj_from_stream, stream_obj_from_stream, Dumper=AnsibleDumper + ) + yaml.dump( + obj_from_stream, stream_obj_from_string, Dumper=AnsibleDumper + ) + else: + yaml.dump( + obj_from_stream, + stream_obj_from_stream, + Dumper=AnsibleDumper, + encoding=None, + ) + yaml.dump( + obj_from_stream, + stream_obj_from_string, + Dumper=AnsibleDumper, + encoding=None, + ) + + yaml_string_stream_obj_from_stream = stream_obj_from_stream.getvalue() + yaml_string_stream_obj_from_string = stream_obj_from_string.getvalue() + + stream_obj_from_stream.seek(0) + stream_obj_from_string.seek(0) + + if PY3: + yaml_string_obj_from_stream = yaml.dump( + obj_from_stream, Dumper=AnsibleDumper + ) + yaml_string_obj_from_string = yaml.dump( + obj_from_string, Dumper=AnsibleDumper + ) + else: + yaml_string_obj_from_stream = yaml.dump( + obj_from_stream, Dumper=AnsibleDumper, encoding=None + ) + yaml_string_obj_from_string = yaml.dump( + obj_from_string, Dumper=AnsibleDumper, encoding=None + ) + + assert yaml_string == yaml_string_obj_from_stream + assert ( + yaml_string + == 
yaml_string_obj_from_stream + == yaml_string_obj_from_string + ) + assert ( + yaml_string + == yaml_string_obj_from_stream + == yaml_string_obj_from_string + == yaml_string_stream_obj_from_stream + == yaml_string_stream_obj_from_string + ) + assert obj == obj_from_stream + assert obj == obj_from_string + assert obj == yaml_string_obj_from_stream + assert obj == yaml_string_obj_from_string + assert ( + obj + == obj_from_stream + == obj_from_string + == yaml_string_obj_from_stream + == yaml_string_obj_from_string + ) + return { + "obj": obj, + "yaml_string": yaml_string, + "yaml_string_from_stream": yaml_string_from_stream, + "obj_from_stream": obj_from_stream, + "obj_from_string": obj_from_string, + "yaml_string_obj_from_string": yaml_string_obj_from_string, + } diff --git a/ansible_collections/splunk/es/tests/unit/modules/__init__.py b/ansible_collections/splunk/es/tests/unit/modules/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/modules/__init__.py diff --git a/ansible_collections/splunk/es/tests/unit/modules/conftest.py b/ansible_collections/splunk/es/tests/unit/modules/conftest.py new file mode 100644 index 000000000..e19a1e04c --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/modules/conftest.py @@ -0,0 +1,40 @@ +# Copyright (c) 2017 Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import json + +import pytest + +from ansible.module_utils.six import string_types +from ansible.module_utils._text import to_bytes +from ansible.module_utils.common._collections_compat import MutableMapping + + +@pytest.fixture +def patch_ansible_module(request, mocker): + if isinstance(request.param, string_types): + args = request.param + elif isinstance(request.param, MutableMapping): + if "ANSIBLE_MODULE_ARGS" not in request.param: + 
request.param = {"ANSIBLE_MODULE_ARGS": request.param} + if "_ansible_remote_tmp" not in request.param["ANSIBLE_MODULE_ARGS"]: + request.param["ANSIBLE_MODULE_ARGS"][ + "_ansible_remote_tmp" + ] = "/tmp" + if ( + "_ansible_keep_remote_files" + not in request.param["ANSIBLE_MODULE_ARGS"] + ): + request.param["ANSIBLE_MODULE_ARGS"][ + "_ansible_keep_remote_files" + ] = False + args = json.dumps(request.param) + else: + raise Exception( + "Malformed data to the patch_ansible_module pytest fixture" + ) + + mocker.patch("ansible.module_utils.basic._ANSIBLE_ARGS", to_bytes(args)) diff --git a/ansible_collections/splunk/es/tests/unit/modules/utils.py b/ansible_collections/splunk/es/tests/unit/modules/utils.py new file mode 100644 index 000000000..d55afc0b3 --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/modules/utils.py @@ -0,0 +1,51 @@ +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json + +from ansible_collections.trendmicro.deepsec.tests.unit.compat import unittest +from ansible_collections.trendmicro.deepsec.tests.unit.compat.mock import patch +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes + + +def set_module_args(args): + if "_ansible_remote_tmp" not in args: + args["_ansible_remote_tmp"] = "/tmp" + if "_ansible_keep_remote_files" not in args: + args["_ansible_keep_remote_files"] = False + + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) + + +class AnsibleExitJson(Exception): + pass + + +class AnsibleFailJson(Exception): + pass + + +def exit_json(*args, **kwargs): + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class ModuleTestCase(unittest.TestCase): + def setUp(self): + self.mock_module = patch.multiple( + basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json + ) + 
self.mock_module.start() + self.mock_sleep = patch("time.sleep") + self.mock_sleep.start() + set_module_args({}) + self.addCleanup(self.mock_module.stop) + self.addCleanup(self.mock_sleep.stop) diff --git a/ansible_collections/splunk/es/tests/unit/plugins/action/__init__.py b/ansible_collections/splunk/es/tests/unit/plugins/action/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/plugins/action/__init__.py diff --git a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_adaptive_response_notable_events.py b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_adaptive_response_notable_events.py new file mode 100644 index 000000000..b6a84fc78 --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_adaptive_response_notable_events.py @@ -0,0 +1,443 @@ +# Copyright (c) 2022 Red Hat +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. 
+# + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible.module_utils.six import PY2 + +builtin_import = "builtins.__import__" +if PY2: + builtin_import = "__builtin__.__import__" + +import tempfile +from ansible.playbook.task import Task +from ansible.template import Templar +from ansible_collections.splunk.es.plugins.action.splunk_adaptive_response_notable_events import ( + ActionModule, +) +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, +) +from ansible_collections.ansible.utils.tests.unit.compat.mock import ( + MagicMock, + patch, +) + +RESPONSE_PAYLOAD = [ + { + "entry": [ + { + "content": { + "action.notable.param.default_owner": "", + "action.notable.param.default_status": "0", + "action.notable.param.drilldown_name": "test_drill_name", + "action.notable.param.drilldown_search": "test_drill", + "action.notable.param.drilldown_earliest_offset": "$info_min_time$", + "action.notable.param.drilldown_latest_offset": "$info_max_time$", + "action.notable.param.extract_artifacts": '{"asset": ["src", "dest", "dvc", "orig_host"],"identity": ' + '["src_user", "user", "src_user_id", "src_user_role", "user_id", "user_role", "vendor_account"]}', + "action.notable.param.investigation_profiles": '{"profile://test profile 1":{}, "profile://test profile 2":{}, ' + '"profile://test profile 3":{}}', + "action.notable.param.next_steps": '{"version": 1, "data": "[[action|makestreams]][[action|nbtstat]][[action|nslookup]]"}', + "action.notable.param.recommended_actions": "email,logevent,makestreams,nbtstat", + "action.notable.param.rule_description": "test notable event", + "action.notable.param.rule_title": "ansible_test_notable", + "action.notable.param.security_domain": "threat", + "action.notable.param.severity": "high", + "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent' + 'ication.dest") as 
"dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai' + 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio' + 'n.src" as "src" | where "count">=6', + "actions": "notable", + }, + "name": "Ansible Test", + } + ] + }, + { + "entry": [ + { + "content": { + "action.notable.param.default_owner": "", + "action.notable.param.default_status": "", + "action.notable.param.drilldown_name": "test_drill_name", + "action.notable.param.drilldown_search": "test_drill", + "action.notable.param.drilldown_earliest_offset": "$info_min_time$", + "action.notable.param.drilldown_latest_offset": "$info_max_time$", + "action.notable.param.extract_artifacts": '{"asset": ["src", "dest"],"identity": ["src_user", "user", "src_user_id"]}', + "action.notable.param.investigation_profiles": '{"profile://test profile 1":{}, "profile://test profile 2":{}, ' + '"profile://test profile 3":{}}', + "action.notable.param.next_steps": '{"version": 1, "data": "[[action|makestreams]]"}', + "action.notable.param.recommended_actions": "email,logevent", + "action.notable.param.rule_description": "test notable event", + "action.notable.param.rule_title": "ansible_test_notable", + "action.notable.param.security_domain": "threat", + "action.notable.param.severity": "high", + "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent' + 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai' + 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio' + 'n.src" as "src" | where "count">=6', + "actions": "notable", + }, + "name": "Ansible Test", + } + ] + }, +] + +REQUEST_PAYLOAD = [ + { + "correlation_search_name": "Ansible Test", + "default_status": "unassigned", + "description": "test notable 
event", + "drilldown_earliest_offset": "$info_min_time$", + "drilldown_latest_offset": "$info_max_time$", + "drilldown_name": "test_drill_name", + "drilldown_search": "test_drill", + "extract_artifacts": { + "asset": ["src", "dest", "dvc", "orig_host"], + "identity": [ + "src_user", + "user", + "src_user_id", + "src_user_role", + "user_id", + "user_role", + "vendor_account", + ], + }, + "investigation_profiles": [ + "test profile 1", + "test profile 2", + "test profile 3", + ], + "next_steps": ["makestreams", "nbtstat", "nslookup"], + "name": "ansible_test_notable", + "recommended_actions": ["email", "logevent", "makestreams", "nbtstat"], + "security_domain": "threat", + "severity": "high", + }, + { + "correlation_search_name": "Ansible Test", + "description": "test notable event", + "drilldown_earliest_offset": "$info_min_time$", + "drilldown_latest_offset": "$info_max_time$", + "extract_artifacts": { + "asset": ["src", "dest"], + "identity": ["src_user", "user", "src_user_id"], + }, + "next_steps": ["makestreams"], + "name": "ansible_test_notable", + "recommended_actions": ["email", "logevent"], + "security_domain": "threat", + "severity": "high", + }, +] + + +class TestSplunkEsAdaptiveResponseNotableEvents: + def setup(self): + task = MagicMock(Task) + # Ansible > 2.13 looks for check_mode in task + task.check_mode = False + play_context = MagicMock() + # Ansible <= 2.13 looks for check_mode in play_context + play_context.check_mode = False + connection = patch( + "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection" + ) + connection._socket_path = tempfile.NamedTemporaryFile().name + fake_loader = {} + templar = Templar(loader=fake_loader) + self._plugin = ActionModule( + task=task, + connection=connection, + play_context=play_context, + loader=fake_loader, + templar=templar, + shared_loader_obj=None, + ) + self._plugin._task.action = "adaptive_response_notable_events" + self._plugin._task.async_val = False + self._task_vars = {} + 
self.metadata = { + "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent' + 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai' + 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio' + 'n.src" as "src" | where "count">=6', + "actions": "notable", + } + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_adaptive_response_notable_events_merged_01( + self, connection, monkeypatch + ): + metadata = { + "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent' + 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai' + 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio' + 'n.src" as "src" | where "count">=6', + "actions": "", + } + self._plugin.api_response = RESPONSE_PAYLOAD[0] + self._plugin.search_for_resource_name = MagicMock() + self._plugin.search_for_resource_name.return_value = {}, metadata + + def create_update(self, rest_path, data=None): + return RESPONSE_PAYLOAD[0] + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD[0]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_adaptive_response_notable_events_merged_02( + self, connection, monkeypatch + ): + self._plugin.api_response = RESPONSE_PAYLOAD[0] + self._plugin.search_for_resource_name = MagicMock() + 
self._plugin.search_for_resource_name.return_value = ( + RESPONSE_PAYLOAD[0], + self.metadata, + ) + + def create_update(self, rest_path, data=None): + return RESPONSE_PAYLOAD[1] + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD[1]], + } + result = self._plugin.run(task_vars=self._task_vars) + + assert result["changed"] is True + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_adaptive_response_notable_events_merged_idempotent( + self, conn, monkeypatch + ): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + def create_update(self, rest_path, data=None): + return RESPONSE_PAYLOAD[0] + + def get_by_path(self, path): + return RESPONSE_PAYLOAD[0] + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD[0]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_adaptive_response_notable_events_replaced_01( + self, conn, monkeypatch + ): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + self._plugin.search_for_resource_name = MagicMock() + self._plugin.search_for_resource_name.return_value = ( + RESPONSE_PAYLOAD[0], + self.metadata, + ) + + def create_update(self, rest_path, data=None): + return RESPONSE_PAYLOAD[0] + + def get_by_path(self, path): + return RESPONSE_PAYLOAD[0] + + def delete_by_path(self, path): + return {} + + monkeypatch.setattr(SplunkRequest, 
"create_update", create_update) + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path) + + self._plugin._task.args = { + "state": "replaced", + "config": [REQUEST_PAYLOAD[1]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_adaptive_response_notable_events_replaced_02( + self, conn, monkeypatch + ): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + self._plugin.search_for_resource_name = MagicMock() + self._plugin.search_for_resource_name.return_value = ( + RESPONSE_PAYLOAD[0], + self.metadata, + ) + + def create_update(self, rest_path, data=None): + return RESPONSE_PAYLOAD[0] + + def get_by_path(self, path): + return RESPONSE_PAYLOAD[0] + + def delete_by_path(self, path): + return {} + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path) + + self._plugin._task.args = { + "state": "replaced", + "config": [REQUEST_PAYLOAD[1]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_adaptive_response_notable_events_replaced_idempotent( + self, conn, monkeypatch + ): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + def create_update(self, rest_path, data=None): + return RESPONSE_PAYLOAD[0] + + def get_by_path(self, path): + return RESPONSE_PAYLOAD[0] + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + + self._plugin._task.args = { + "state": 
"replaced", + "config": [REQUEST_PAYLOAD[0]], + } + result = self._plugin.run(task_vars=self._task_vars) + + assert result["changed"] is False + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_adaptive_response_notable_events_deleted( + self, conn, monkeypatch + ): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + self._plugin.search_for_resource_name = MagicMock() + self._plugin.search_for_resource_name.return_value = ( + RESPONSE_PAYLOAD[0], + self.metadata, + ) + + def create_update(self, rest_path, data=None): + return RESPONSE_PAYLOAD[0] + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + + self._plugin._task.args = { + "state": "deleted", + "config": [ + { + "correlation_search_name": "Ansible Test", + } + ], + } + result = self._plugin.run(task_vars=self._task_vars) + + assert result["changed"] is True + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_adaptive_response_notable_events_deleted_idempotent( + self, connection + ): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + self._plugin.search_for_resource_name = MagicMock() + self._plugin.search_for_resource_name.return_value = {}, {} + + self._plugin._task.args = { + "state": "deleted", + "config": [ + { + "correlation_search_name": "Ansible Test", + } + ], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_adaptive_response_notable_events_gathered( + self, conn, monkeypatch + ): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + self._plugin.search_for_resource_name = MagicMock() + self._plugin.search_for_resource_name.return_value = ( + 
RESPONSE_PAYLOAD[0], + self.metadata, + ) + + self._plugin._task.args = { + "state": "gathered", + "config": [ + { + "correlation_search_name": "Ansible Test", + } + ], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False diff --git a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_correlation_searches.py b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_correlation_searches.py new file mode 100644 index 000000000..fca268c98 --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_correlation_searches.py @@ -0,0 +1,373 @@ +# Copyright (c) 2022 Red Hat +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. 
+# + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible.module_utils.six import PY2 + +builtin_import = "builtins.__import__" +if PY2: + builtin_import = "__builtin__.__import__" + +import tempfile +from ansible.playbook.task import Task +from ansible.template import Templar +from ansible_collections.splunk.es.plugins.action.splunk_correlation_searches import ( + ActionModule, +) +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, +) +from ansible_collections.ansible.utils.tests.unit.compat.mock import ( + MagicMock, + patch, +) + +RESPONSE_PAYLOAD = { + "entry": [ + { + "acl": {"app": "DA-ESS-EndpointProtection"}, + "content": { + "action.correlationsearch.annotations": '{"cis20": ["test1"], "mitre_attack": ["test2"], "kill_chain_phases": ["test3"], ' + '"nist": ["test4"], "test_framework": ["test5"]}', + "action.correlationsearch.enabled": "1", + "action.correlationsearch.label": "Ansible Test", + "alert.digest_mode": True, + "alert.suppress": False, + "alert.suppress.fields": "test_field1", + "alert.suppress.period": "5s", + "alert_comparator": "greater than", + "alert_threshold": "10", + "alert_type": "number of events", + "cron_schedule": "*/5 * * * *", + "description": "test description", + "disabled": False, + "dispatch.earliest_time": "-24h", + "dispatch.latest_time": "now", + "dispatch.rt_backfill": True, + "is_scheduled": True, + "realtime_schedule": True, + "request.ui_dispatch_app": "SplunkEnterpriseSecuritySuite", + "schedule_priority": "default", + "schedule_window": "0", + "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent' + 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai' + 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio' + 'n.src" as 
"src" | where "count">=6', + }, + "name": "Ansible Test", + } + ] +} + +REQUEST_PAYLOAD = [ + { + "name": "Ansible Test", + "disabled": False, + "description": "test description", + "app": "DA-ESS-EndpointProtection", + "annotations": { + "cis20": ["test1"], + "mitre_attack": ["test2"], + "kill_chain_phases": ["test3"], + "nist": ["test4"], + "custom": [ + { + "framework": "test_framework", + "custom_annotations": ["test5"], + } + ], + }, + "ui_dispatch_context": "SplunkEnterpriseSecuritySuite", + "time_earliest": "-24h", + "time_latest": "now", + "cron_schedule": "*/5 * * * *", + "scheduling": "realtime", + "schedule_window": "0", + "schedule_priority": "default", + "trigger_alert": "once", + "trigger_alert_when": "number of events", + "trigger_alert_when_condition": "greater than", + "trigger_alert_when_value": "10", + "throttle_window_duration": "5s", + "throttle_fields_to_group_by": ["test_field1"], + "suppress_alerts": False, + "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent' + 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai' + 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio' + 'n.src" as "src" | where "count">=6', + }, + { + "name": "Ansible Test", + "disabled": False, + "description": "test description", + "app": "SplunkEnterpriseSecuritySuite", + "annotations": { + "cis20": ["test1", "test2"], + "mitre_attack": ["test3", "test4"], + "kill_chain_phases": ["test5", "test6"], + "nist": ["test7", "test8"], + "custom": [ + { + "framework": "test_framework2", + "custom_annotations": ["test9", "test10"], + } + ], + }, + "ui_dispatch_context": "SplunkEnterpriseSecuritySuite", + "time_earliest": "-24h", + "time_latest": "now", + "cron_schedule": "*/5 * * * *", + "scheduling": "continuous", + "schedule_window": "auto", + "schedule_priority": 
"default", + "trigger_alert": "once", + "trigger_alert_when": "number of events", + "trigger_alert_when_condition": "greater than", + "trigger_alert_when_value": "10", + "throttle_window_duration": "5s", + "throttle_fields_to_group_by": ["test_field1", "test_field2"], + "suppress_alerts": True, + "search": '| tstats summariesonly=true values("Authentication.tag") as "tag",dc("Authentication.user") as "user_count",dc("Authent' + 'ication.dest") as "dest_count",count from datamodel="Authentication"."Authentication" where nodename="Authentication.Fai' + 'led_Authentication" by "Authentication.app","Authentication.src" | rename "Authentication.app" as "app","Authenticatio' + 'n.src" as "src" | where "count">=6', + }, +] + + +class TestSplunkEsCorrelationSearches: + def setup(self): + task = MagicMock(Task) + # Ansible > 2.13 looks for check_mode in task + task.check_mode = False + play_context = MagicMock() + # Ansible <= 2.13 looks for check_mode in play_context + play_context.check_mode = False + connection = patch( + "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection" + ) + connection._socket_path = tempfile.NamedTemporaryFile().name + fake_loader = {} + templar = Templar(loader=fake_loader) + self._plugin = ActionModule( + task=task, + connection=connection, + play_context=play_context, + loader=fake_loader, + templar=templar, + shared_loader_obj=None, + ) + self._plugin._task.action = "correlation_searches" + self._plugin._task.async_val = False + self._task_vars = {} + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_correlation_searches_merged(self, connection, monkeypatch): + self._plugin.api_response = RESPONSE_PAYLOAD + self._plugin.search_for_resource_name = MagicMock() + self._plugin.search_for_resource_name.return_value = {} + + def create_update(self, rest_path, data=None): + return RESPONSE_PAYLOAD + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + + self._plugin._connection.socket_path 
= ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD[0]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_correlation_searches_merged_idempotent( + self, conn, monkeypatch + ): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + def create_update(self, rest_path, data=None): + return RESPONSE_PAYLOAD + + def get_by_path(self, path): + return RESPONSE_PAYLOAD + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD[0]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_correlation_searches_replaced_01(self, conn, monkeypatch): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + self._plugin.search_for_resource_name = MagicMock() + self._plugin.search_for_resource_name.return_value = RESPONSE_PAYLOAD + + def create_update(self, rest_path, data=None): + return RESPONSE_PAYLOAD + + def get_by_path(self, path): + return RESPONSE_PAYLOAD + + def delete_by_path(self, path): + return {} + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path) + + self._plugin._task.args = { + "state": "replaced", + "config": [REQUEST_PAYLOAD[1]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + 
@patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_correlation_searches_replaced_02(self, conn, monkeypatch): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + self._plugin.search_for_resource_name = MagicMock() + self._plugin.search_for_resource_name.return_value = RESPONSE_PAYLOAD + + def create_update(self, rest_path, data=None): + return RESPONSE_PAYLOAD + + def get_by_path(self, path): + return RESPONSE_PAYLOAD + + def delete_by_path(self, path): + return {} + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path) + + self._plugin._task.args = { + "state": "replaced", + "config": [REQUEST_PAYLOAD[1]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_correlation_searches_replaced_idempotent( + self, conn, monkeypatch + ): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + def create_update(self, rest_path, data=None): + return RESPONSE_PAYLOAD + + def get_by_path(self, path): + return RESPONSE_PAYLOAD + + def delete_by_path(self, path): + return {} + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path) + + self._plugin._task.args = { + "state": "replaced", + "config": [REQUEST_PAYLOAD[0]], + } + result = self._plugin.run(task_vars=self._task_vars) + + assert result["changed"] is False + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_correlation_searches_deleted(self, conn, monkeypatch): + self._plugin._connection.socket_path 
= ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + def get_by_path(self, path): + return RESPONSE_PAYLOAD + + def delete_by_path(self, path): + return {} + + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path) + + self._plugin._task.args = { + "state": "deleted", + "config": [{"name": "Ansible Test"}], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_correlation_searches_deleted_idempotent(self, connection): + self._plugin.search_for_resource_name = MagicMock() + self._plugin.search_for_resource_name.return_value = {} + + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + self._plugin._task.args = { + "state": "deleted", + "config": [{"name": "Ansible Test"}], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_correlation_searches_gathered(self, conn, monkeypatch): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + def get_by_path(self, path): + return RESPONSE_PAYLOAD + + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + + self._plugin._task.args = { + "state": "gathered", + "config": [{"name": "Ansible Test"}], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False diff --git a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_monitors.py b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_monitors.py new file mode 100644 index 000000000..068fe638d --- /dev/null +++ 
b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_monitors.py @@ -0,0 +1,357 @@ +# Copyright (c) 2022 Red Hat +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. +# + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible.module_utils.six import PY2 + +builtin_import = "builtins.__import__" +if PY2: + builtin_import = "__builtin__.__import__" + +import tempfile +from ansible.playbook.task import Task +from ansible.template import Templar +from ansible_collections.splunk.es.plugins.action.splunk_data_inputs_monitor import ( + ActionModule, +) +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, +) +from ansible_collections.ansible.utils.tests.unit.compat.mock import ( + MagicMock, + patch, +) + +RESPONSE_PAYLOAD = { + "entry": [ + { + "content": { + "_rcvbuf": 1572864, + "blacklist": "//var/log/[a-z]/gm", + "check-index": None, + "crcSalt": "<SOURCE>", + "disabled": False, + "eai:acl": None, + "filecount": 74, + "filestatecount": 82, + "followTail": False, + "host": "$decideOnStartup", + "host_regex": "/(test_host)/gm", + "host_resolved": "ip-172-31-52-131.us-west-2.compute.internal", + "host_segment": 3, + "ignoreOlderThan": "5d", + "index": "default", + "recursive": True, + "source": "test", + "sourcetype": "test_source_type", + 
"time_before_close": 4, + "whitelist": "//var/log/[0-9]/gm", + }, + "name": "/var/log", + } + ] +} + +REQUEST_PAYLOAD = [ + { + "blacklist": "//var/log/[a-z]/gm", + "crc_salt": "<SOURCE>", + "disabled": False, + "follow_tail": False, + "host": "$decideOnStartup", + "host_regex": "/(test_host)/gm", + "host_segment": 3, + "index": "default", + "name": "/var/log", + "recursive": True, + "sourcetype": "test_source_type", + "whitelist": "//var/log/[0-9]/gm", + }, + { + "blacklist": "//var/log/[a-z0-9]/gm", + "crc_salt": "<SOURCE>", + "disabled": False, + "follow_tail": False, + "host": "$decideOnStartup", + "index": "default", + "name": "/var/log", + "recursive": True, + }, +] + + +class TestSplunkEsDataInputsMonitorRules: + def setup(self): + task = MagicMock(Task) + # Ansible > 2.13 looks for check_mode in task + task.check_mode = False + play_context = MagicMock() + # Ansible <= 2.13 looks for check_mode in play_context + play_context.check_mode = False + connection = patch( + "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection" + ) + connection._socket_path = tempfile.NamedTemporaryFile().name + fake_loader = {} + templar = Templar(loader=fake_loader) + self._plugin = ActionModule( + task=task, + connection=connection, + play_context=play_context, + loader=fake_loader, + templar=templar, + shared_loader_obj=None, + ) + self._plugin._task.action = "data_inputs_monitor" + self._plugin._task.async_val = False + self._task_vars = {} + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_data_inputs_monitor_merged(self, connection, monkeypatch): + self._plugin.api_response = RESPONSE_PAYLOAD + self._plugin.search_for_resource_name = MagicMock() + self._plugin.search_for_resource_name.return_value = {} + + def create_update( + self, rest_path, data=None, mock=None, mock_data=None + ): + return RESPONSE_PAYLOAD + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + + self._plugin._connection.socket_path = ( + 
tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD[0]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_data_inputs_monitor_merged_idempotent(self, conn, monkeypatch): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + def create_update( + self, rest_path, data=None, mock=None, mock_data=None + ): + return RESPONSE_PAYLOAD + + def get_by_path(self, path): + return RESPONSE_PAYLOAD + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + + self._plugin._task.args = { + "state": "merged", + "config": [ + { + "blacklist": "//var/log/[a-z]/gm", + "crc_salt": "<SOURCE>", + "disabled": False, + "follow_tail": False, + "host": "$decideOnStartup", + "host_regex": "/(test_host)/gm", + "host_segment": 3, + "index": "default", + "name": "/var/log", + "recursive": True, + "sourcetype": "test_source_type", + "whitelist": "//var/log/[0-9]/gm", + } + ], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_data_inputs_monitor_replaced(self, conn, monkeypatch): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + self._plugin.search_for_resource_name = MagicMock() + self._plugin.search_for_resource_name.return_value = RESPONSE_PAYLOAD + + def create_update( + self, rest_path, data=None, mock=None, mock_data=None + ): + return RESPONSE_PAYLOAD + + def get_by_path(self, path): + return RESPONSE_PAYLOAD + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + 
monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + + self._plugin._task.args = { + "state": "replaced", + "config": [ + { + "blacklist": "//var/log/[a-z0-9]/gm", + "crc_salt": "<SOURCE>", + "disabled": False, + "follow_tail": False, + "host": "$decideOnStartup", + "index": "default", + "name": "/var/log", + "recursive": True, + } + ], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_data_inputs_monitor_replaced_idempotent( + self, conn, monkeypatch + ): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + def create_update( + self, rest_path, data=None, mock=None, mock_data=None + ): + return RESPONSE_PAYLOAD + + def get_by_path(self, path): + return { + "entry": [ + { + "content": { + "_rcvbuf": 1572864, + "blacklist": "//var/log/[a-z]/gm", + "check-index": None, + "crcSalt": "<SOURCE>", + "disabled": False, + "eai:acl": None, + "filecount": 74, + "filestatecount": 82, + "followTail": False, + "host": "$decideOnStartup", + "host_regex": "/(test_host)/gm", + "host_resolved": "ip-172-31-52-131.us-west-2.compute.internal", + "host_segment": 3, + "ignoreOlderThan": "5d", + "index": "default", + "recursive": True, + "source": "test", + "sourcetype": "test_source_type", + "time_before_close": 4, + "whitelist": "//var/log/[0-9]/gm", + }, + "name": "/var/log", + } + ] + } + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + + self._plugin._task.args = { + "state": "replaced", + "config": [ + { + "blacklist": "//var/log/[a-z]/gm", + "crc_salt": "<SOURCE>", + "disabled": False, + "follow_tail": False, + "host": "$decideOnStartup", + "host_regex": "/(test_host)/gm", + "host_segment": 3, + "index": "default", + "name": "/var/log", + "recursive": True, + "sourcetype": 
"test_source_type", + "whitelist": "//var/log/[0-9]/gm", + } + ], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_data_inputs_monitor_deleted(self, conn, monkeypatch): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + def create_update( + self, rest_path, data=None, mock=None, mock_data=None + ): + return RESPONSE_PAYLOAD + + def get_by_path(self, path): + return RESPONSE_PAYLOAD + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + + self._plugin._task.args = { + "state": "deleted", + "config": [{"name": "/var/log"}], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_data_inputs_monitor_deleted_idempotent(self, connection): + self._plugin.search_for_resource_name = MagicMock() + self._plugin.search_for_resource_name.return_value = {} + + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + self._plugin._task.args = { + "state": "deleted", + "config": [{"name": "/var/log"}], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_data_inputs_monitor_gathered(self, conn, monkeypatch): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + def get_by_path(self, path): + return RESPONSE_PAYLOAD + + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + + self._plugin._task.args = { + "state": "gathered", + "config": [{"name": "/var/log"}], + } + result = 
self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False diff --git a/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_network.py b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_network.py new file mode 100644 index 000000000..dbadf9052 --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/plugins/action/test_es_data_inputs_network.py @@ -0,0 +1,711 @@ +# Copyright (c) 2022 Red Hat +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. 
+# + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible.module_utils.six import PY2 + +builtin_import = "builtins.__import__" +if PY2: + builtin_import = "__builtin__.__import__" + +import tempfile +from ansible.playbook.task import Task +from ansible.template import Templar +from ansible_collections.splunk.es.plugins.action.splunk_data_inputs_network import ( + ActionModule, +) +from ansible_collections.splunk.es.plugins.module_utils.splunk import ( + SplunkRequest, +) +from ansible_collections.ansible.utils.tests.unit.compat.mock import ( + MagicMock, + patch, +) + +RESPONSE_PAYLOAD = { + "tcp_cooked": { + "entry": [ + { + "name": "default:8100", + "content": { + "connection_host": "ip", + "disabled": False, + "host": "$decideOnStartup", + "restrictToHost": "default", + }, + } + ], + }, + "tcp_raw": { + "entry": [ + { + "name": "default:8101", + "content": { + "connection_host": "ip", + "disabled": True, + "host": "$decideOnStartup", + "index": "default", + "queue": "parsingQueue", + "rawTcpDoneTimeout": 9, + "restrictToHost": "default", + "source": "test_source", + "sourcetype": "test_source_type", + }, + } + ], + }, + "udp": { + "entry": [ + { + "name": "default:7890", + "content": { + "connection_host": "ip", + "disabled": True, + "host": "$decideOnStartup", + "index": "default", + "no_appending_timestamp": True, + "no_priority_stripping": True, + "queue": "parsingQueue", + "restrictToHost": "default", + "source": "test_source", + "sourcetype": "test_source_type", + }, + } + ], + }, + "splunktcptoken": { + "entry": [ + { + "name": "splunktcptoken://test_token", + "content": { + "token": "01234567-0123-0123-0123-012345678901", + }, + } + ], + }, + "ssl": { + "entry": [ + { + "name": "test_host", + "content": {}, + } + ], + }, +} + +REQUEST_PAYLOAD = { + "tcp_cooked": { + "protocol": "tcp", + "datatype": "cooked", + "name": 8100, + "connection_host": "ip", + "disabled": False, + "host": "$decideOnStartup", + 
"restrict_to_host": "default", + }, + "tcp_raw": { + "protocol": "tcp", + "datatype": "raw", + "name": 8101, + "connection_host": "ip", + "disabled": True, + "host": "$decideOnStartup", + "index": "default", + "queue": "parsingQueue", + "raw_tcp_done_timeout": 9, + "restrict_to_host": "default", + "source": "test_source", + "sourcetype": "test_source_type", + }, + "udp": { + "protocol": "udp", + "name": 7890, + "connection_host": "ip", + "disabled": True, + "host": "$decideOnStartup", + "index": "default", + "no_appending_timestamp": True, + "no_priority_stripping": True, + "queue": "parsingQueue", + "restrict_to_host": "default", + "source": "test_source", + "sourcetype": "test_source_type", + }, + "splunktcptoken": { + "protocol": "tcp", + "datatype": "splunktcptoken", + "name": "test_token", + "token": "01234567-0123-0123-0123-012345678901", + }, + "ssl": { + "protocol": "tcp", + "datatype": "ssl", + "name": "test_host", + }, +} + +REPLACED_RESPONSE_PAYLOAD = { + "tcp_cooked": { + "entry": [ + { + "name": "default:8100", + "content": { + "connection_host": "ip", + "disabled": True, + "host": "$decideOnStartup", + "restrictToHost": "default", + }, + } + ], + }, + "tcp_raw": { + "entry": [ + { + "name": "default:8101", + "content": { + "connection_host": "ip", + "disabled": True, + "host": "$decideOnStartup", + "index": "default", + "queue": "parsingQueue", + "rawTcpDoneTimeout": 10, + "restrictToHost": "default", + "source": "test_source", + "sourcetype": "test_source_type", + }, + } + ], + }, + "udp": { + "entry": [ + { + "name": "default:7890", + "content": { + "connection_host": "ip", + "disabled": True, + "host": "$decideOnStartup", + "index": "default", + "no_appending_timestamp": False, + "no_priority_stripping": False, + "queue": "parsingQueue", + "restrictToHost": "default", + "source": "test_source", + "sourcetype": "test_source_type", + }, + } + ], + }, + "splunktcptoken": { + "entry": [ + { + "name": "splunktcptoken://test_token", + "content": { + 
"token": "01234567-0123-0123-0123-012345678900", + }, + } + ], + }, +} + +REPLACED_REQUEST_PAYLOAD = { + "tcp_cooked": { + "protocol": "tcp", + "datatype": "cooked", + "name": "default:8100", + "connection_host": "ip", + "disabled": True, + "host": "$decideOnStartup", + "restrict_to_host": "default", + }, + "tcp_raw": { + "protocol": "tcp", + "datatype": "raw", + "name": "default:8101", + "connection_host": "ip", + "disabled": True, + "host": "$decideOnStartup", + "index": "default", + "queue": "parsingQueue", + "raw_tcp_done_timeout": 10, + "restrict_to_host": "default", + "source": "test_source", + "sourcetype": "test_source_type", + }, + "udp": { + "protocol": "udp", + "name": "default:7890", + "connection_host": "ip", + "disabled": True, + "host": "$decideOnStartup", + "index": "default", + "no_appending_timestamp": False, + "no_priority_stripping": False, + "queue": "parsingQueue", + "restrict_to_host": "default", + "source": "test_source", + "sourcetype": "test_source_type", + }, + "splunktcptoken": { + "protocol": "tcp", + "datatype": "splunktcptoken", + "name": "splunktcptoken://test_token", + "token": "01234567-0123-0123-0123-012345678900", + }, +} + + +class TestSplunkEsDataInputsNetworksRules: + def setup(self): + task = MagicMock(Task) + # Ansible > 2.13 looks for check_mode in task + task.check_mode = False + play_context = MagicMock() + # Ansible <= 2.13 looks for check_mode in play_context + play_context.check_mode = False + connection = patch( + "ansible_collections.splunk.es.plugins.module_utils.splunk.Connection" + ) + connection._socket_path = tempfile.NamedTemporaryFile().name + fake_loader = {} + templar = Templar(loader=fake_loader) + self._plugin = ActionModule( + task=task, + connection=connection, + play_context=play_context, + loader=fake_loader, + templar=templar, + shared_loader_obj=None, + ) + self._plugin._task.action = "data_inputs_network" + self._plugin._task.async_val = False + self._task_vars = {} + + 
@patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_data_inputs_network_merged(self, connection, monkeypatch): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + # patch update operation + update_response = RESPONSE_PAYLOAD["tcp_cooked"] + + def get_by_path(self, path): + return {} + + def create_update( + self, rest_path, data=None, mock=None, mock_data=None + ): + return update_response + + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + + # tcp_cooked + update_response = RESPONSE_PAYLOAD["tcp_cooked"] + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD["tcp_cooked"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + # tcp_raw + update_response = RESPONSE_PAYLOAD["tcp_raw"] + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD["tcp_raw"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + # udp + update_response = RESPONSE_PAYLOAD["udp"] + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD["udp"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + # splunktcptoken + update_response = RESPONSE_PAYLOAD["splunktcptoken"] + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD["splunktcptoken"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + # ssl + update_response = RESPONSE_PAYLOAD["ssl"] + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD["ssl"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def 
test_es_data_inputs_network_merged_idempotent(self, conn, monkeypatch): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + # patch get operation + get_response = RESPONSE_PAYLOAD["tcp_cooked"] + + def get_by_path(self, path): + return get_response + + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + + # tcp_cooked + get_response = RESPONSE_PAYLOAD["tcp_cooked"] + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD["tcp_cooked"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + # tcp_raw + get_response = RESPONSE_PAYLOAD["tcp_raw"] + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD["tcp_raw"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + # udp + get_response = RESPONSE_PAYLOAD["udp"] + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD["udp"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + # splunktcptoken + get_response = RESPONSE_PAYLOAD["splunktcptoken"] + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD["splunktcptoken"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + # ssl + get_response = RESPONSE_PAYLOAD["ssl"] + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD["ssl"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_data_inputs_network_replaced(self, conn, monkeypatch): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + # patch get operation + get_response = RESPONSE_PAYLOAD["tcp_cooked"] + # patch update 
operation + update_response = REPLACED_RESPONSE_PAYLOAD["tcp_cooked"] + + get_response = RESPONSE_PAYLOAD["tcp_cooked"] + + def delete_by_path( + self, rest_path, data=None, mock=None, mock_data=None + ): + return {} + + def create_update( + self, rest_path, data=None, mock=None, mock_data=None + ): + return update_response + + def get_by_path(self, path): + return get_response + + monkeypatch.setattr(SplunkRequest, "create_update", create_update) + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path) + + # tcp_cooked + get_response = RESPONSE_PAYLOAD["tcp_cooked"] + update_response = REPLACED_RESPONSE_PAYLOAD["tcp_cooked"] + self._plugin._task.args = { + "state": "replaced", + "config": [REPLACED_REQUEST_PAYLOAD["tcp_cooked"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + # tcp_raw + get_response = RESPONSE_PAYLOAD["tcp_raw"] + update_response = REPLACED_RESPONSE_PAYLOAD["tcp_raw"] + self._plugin._task.args = { + "state": "replaced", + "config": [REPLACED_REQUEST_PAYLOAD["tcp_raw"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + # udp + get_response = RESPONSE_PAYLOAD["udp"] + update_response = REPLACED_RESPONSE_PAYLOAD["udp"] + self._plugin._task.args = { + "state": "replaced", + "config": [REPLACED_REQUEST_PAYLOAD["udp"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + # splunktcptoken + get_response = RESPONSE_PAYLOAD["splunktcptoken"] + update_response = REPLACED_RESPONSE_PAYLOAD["splunktcptoken"] + self._plugin._task.args = { + "state": "replaced", + "config": [REPLACED_REQUEST_PAYLOAD["splunktcptoken"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_data_inputs_network_replaced_idempotent( + self, 
conn, monkeypatch + ): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + # patch get operation + get_response = RESPONSE_PAYLOAD["tcp_cooked"] + + def get_by_path(self, path): + return get_response + + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + + # tcp_cooked + get_response = REPLACED_RESPONSE_PAYLOAD["tcp_cooked"] + self._plugin._task.args = { + "state": "replaced", + "config": [REPLACED_REQUEST_PAYLOAD["tcp_cooked"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + # tcp_raw + get_response = REPLACED_RESPONSE_PAYLOAD["tcp_raw"] + self._plugin._task.args = { + "state": "replaced", + "config": [REPLACED_REQUEST_PAYLOAD["tcp_raw"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + # udp + get_response = REPLACED_RESPONSE_PAYLOAD["udp"] + self._plugin._task.args = { + "state": "replaced", + "config": [REPLACED_REQUEST_PAYLOAD["udp"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + # splunktcptoken + get_response = REPLACED_RESPONSE_PAYLOAD["splunktcptoken"] + self._plugin._task.args = { + "state": "replaced", + "config": [REPLACED_REQUEST_PAYLOAD["splunktcptoken"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_data_inputs_network_deleted(self, conn, monkeypatch): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + def delete_by_path( + self, rest_path, data=None, mock=None, mock_data=None + ): + return {} + + get_response = RESPONSE_PAYLOAD["tcp_cooked"] + + def get_by_path(self, path): + return get_response + + monkeypatch.setattr(SplunkRequest, "delete_by_path", delete_by_path) + 
monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + + # tcp_cooked + get_response = RESPONSE_PAYLOAD["tcp_cooked"] + self._plugin._task.args = { + "state": "deleted", + "config": [REQUEST_PAYLOAD["tcp_cooked"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + # tcp_raw + get_response = RESPONSE_PAYLOAD["tcp_raw"] + self._plugin._task.args = { + "state": "deleted", + "config": [REQUEST_PAYLOAD["tcp_raw"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + # udp + get_response = RESPONSE_PAYLOAD["udp"] + self._plugin._task.args = { + "state": "deleted", + "config": [REQUEST_PAYLOAD["udp"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + # splunktcptoken + get_response = RESPONSE_PAYLOAD["splunktcptoken"] + self._plugin._task.args = { + "state": "deleted", + "config": [REQUEST_PAYLOAD["splunktcptoken"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is True + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_data_inputs_network_deleted_idempotent( + self, conn, monkeypatch + ): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + def get_by_path(self, path): + return {} + + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + + # tcp_cooked + self._plugin._task.args = { + "state": "deleted", + "config": [REQUEST_PAYLOAD["tcp_cooked"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + # tcp_raw + self._plugin._task.args = { + "state": "deleted", + "config": [REQUEST_PAYLOAD["tcp_raw"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + # udp + self._plugin._task.args = { + "state": "deleted", + "config": [REQUEST_PAYLOAD["udp"]], + } + result = 
self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + # splunktcptoken + self._plugin._task.args = { + "state": "deleted", + "config": [REQUEST_PAYLOAD["splunktcptoken"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + @patch("ansible.module_utils.connection.Connection.__rpc__") + def test_es_data_inputs_network_gathered(self, conn, monkeypatch): + self._plugin._connection.socket_path = ( + tempfile.NamedTemporaryFile().name + ) + self._plugin._connection._shell = MagicMock() + + # patch get operation + get_response = RESPONSE_PAYLOAD["tcp_cooked"] + + def get_by_path(self, path): + return get_response + + monkeypatch.setattr(SplunkRequest, "get_by_path", get_by_path) + + # tcp_cooked + get_response = RESPONSE_PAYLOAD["tcp_cooked"] + self._plugin._task.args = { + "state": "gathered", + "config": [REQUEST_PAYLOAD["tcp_cooked"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + # tcp_raw + get_response = RESPONSE_PAYLOAD["tcp_raw"] + self._plugin._task.args = { + "state": "gathered", + "config": [REQUEST_PAYLOAD["tcp_raw"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + # udp + get_response = RESPONSE_PAYLOAD["udp"] + self._plugin._task.args = { + "state": "gathered", + "config": [REQUEST_PAYLOAD["udp"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + # splunktcptoken + get_response = RESPONSE_PAYLOAD["splunktcptoken"] + self._plugin._task.args = { + "state": "gathered", + "config": [REQUEST_PAYLOAD["splunktcptoken"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] is False + + # ssl + get_response = RESPONSE_PAYLOAD["ssl"] + self._plugin._task.args = { + "state": "merged", + "config": [REQUEST_PAYLOAD["ssl"]], + } + result = self._plugin.run(task_vars=self._task_vars) + assert result["changed"] 
is False diff --git a/ansible_collections/splunk/es/tests/unit/plugins/modules/__init__.py b/ansible_collections/splunk/es/tests/unit/plugins/modules/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/plugins/modules/__init__.py diff --git a/ansible_collections/splunk/es/tests/unit/plugins/modules/conftest.py b/ansible_collections/splunk/es/tests/unit/plugins/modules/conftest.py new file mode 100644 index 000000000..e19a1e04c --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/plugins/modules/conftest.py @@ -0,0 +1,40 @@ +# Copyright (c) 2017 Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import json + +import pytest + +from ansible.module_utils.six import string_types +from ansible.module_utils._text import to_bytes +from ansible.module_utils.common._collections_compat import MutableMapping + + +@pytest.fixture +def patch_ansible_module(request, mocker): + if isinstance(request.param, string_types): + args = request.param + elif isinstance(request.param, MutableMapping): + if "ANSIBLE_MODULE_ARGS" not in request.param: + request.param = {"ANSIBLE_MODULE_ARGS": request.param} + if "_ansible_remote_tmp" not in request.param["ANSIBLE_MODULE_ARGS"]: + request.param["ANSIBLE_MODULE_ARGS"][ + "_ansible_remote_tmp" + ] = "/tmp" + if ( + "_ansible_keep_remote_files" + not in request.param["ANSIBLE_MODULE_ARGS"] + ): + request.param["ANSIBLE_MODULE_ARGS"][ + "_ansible_keep_remote_files" + ] = False + args = json.dumps(request.param) + else: + raise Exception( + "Malformed data to the patch_ansible_module pytest fixture" + ) + + mocker.patch("ansible.module_utils.basic._ANSIBLE_ARGS", to_bytes(args)) diff --git a/ansible_collections/splunk/es/tests/unit/plugins/modules/utils.py 
b/ansible_collections/splunk/es/tests/unit/plugins/modules/utils.py new file mode 100644 index 000000000..d55afc0b3 --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/plugins/modules/utils.py @@ -0,0 +1,51 @@ +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json + +from ansible_collections.trendmicro.deepsec.tests.unit.compat import unittest +from ansible_collections.trendmicro.deepsec.tests.unit.compat.mock import patch +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes + + +def set_module_args(args): + if "_ansible_remote_tmp" not in args: + args["_ansible_remote_tmp"] = "/tmp" + if "_ansible_keep_remote_files" not in args: + args["_ansible_keep_remote_files"] = False + + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) + + +class AnsibleExitJson(Exception): + pass + + +class AnsibleFailJson(Exception): + pass + + +def exit_json(*args, **kwargs): + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class ModuleTestCase(unittest.TestCase): + def setUp(self): + self.mock_module = patch.multiple( + basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json + ) + self.mock_module.start() + self.mock_sleep = patch("time.sleep") + self.mock_sleep.start() + set_module_args({}) + self.addCleanup(self.mock_module.stop) + self.addCleanup(self.mock_sleep.stop) diff --git a/ansible_collections/splunk/es/tests/unit/requirements.txt b/ansible_collections/splunk/es/tests/unit/requirements.txt new file mode 100644 index 000000000..a9772bea1 --- /dev/null +++ b/ansible_collections/splunk/es/tests/unit/requirements.txt @@ -0,0 +1,42 @@ +boto3 +placebo +pycrypto +passlib +pypsrp +python-memcached +pytz +pyvmomi +redis +requests +setuptools > 0.6 # pytest-xdist installed via requirements does not work with very 
old setuptools (sanity_ok) +unittest2 ; python_version < '2.7' +importlib ; python_version < '2.7' +netaddr +ipaddress +netapp-lib +solidfire-sdk-python + +# requirements for F5 specific modules +f5-sdk ; python_version >= '2.7' +f5-icontrol-rest ; python_version >= '2.7' +deepdiff + +# requirement for Fortinet specific modules +pyFMG + +# requirement for aci_rest module +xmljson + +# requirement for winrm connection plugin tests +pexpect + +# requirement for the linode module +linode-python # APIv3 +linode_api4 ; python_version > '2.6' # APIv4 + +# requirement for the gitlab module +python-gitlab +httmock + +# requirment for kubevirt modules +openshift ; python_version >= '2.7' diff --git a/ansible_collections/splunk/es/tox.ini b/ansible_collections/splunk/es/tox.ini new file mode 100644 index 000000000..a533ccb30 --- /dev/null +++ b/ansible_collections/splunk/es/tox.ini @@ -0,0 +1,33 @@ +[tox] +minversion = 1.4.2 +envlist = linters +skipsdist = True + +[testenv] +basepython = python3 +deps = -r{toxinidir}/requirements.txt + -r{toxinidir}/test-requirements.txt +commands = find {toxinidir} -type f -name "*.py[c|o]" -delete + +[testenv:black] +install_command = pip install {opts} {packages} +commands = + black -v -l79 {toxinidir} + +[testenv:linters] +install_command = pip install {opts} {packages} +commands = + black -v -l79 --check {toxinidir} + flake8 {posargs} + +[testenv:venv] +commands = {posargs} + +[flake8] +# E123, E125 skipped as they are invalid PEP-8. + +show-source = True +ignore = E123,E125,E402,E501,E741,W503 +max-line-length = 160 +builtins = _ +exclude = .git,.tox,tests/unit/compat/ |