summaryrefslogtreecommitdiffstats
path: root/ansible_collections/splunk/es/plugins/modules
diff options
context:
space:
mode:
Diffstat (limited to 'ansible_collections/splunk/es/plugins/modules')
-rw-r--r--ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py462
-rw-r--r--ansible_collections/splunk/es/plugins/modules/correlation_search.py376
-rw-r--r--ansible_collections/splunk/es/plugins/modules/correlation_search_info.py80
-rw-r--r--ansible_collections/splunk/es/plugins/modules/data_input_monitor.py264
-rw-r--r--ansible_collections/splunk/es/plugins/modules/data_input_network.py276
-rw-r--r--ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py462
-rw-r--r--ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py512
-rw-r--r--ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py376
-rw-r--r--ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py80
-rw-r--r--ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py630
-rw-r--r--ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py264
-rw-r--r--ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py276
-rw-r--r--ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py300
-rw-r--r--ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py603
14 files changed, 4961 insertions, 0 deletions
diff --git a/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py b/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py
new file mode 100644
index 000000000..29099424e
--- /dev/null
+++ b/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py
@@ -0,0 +1,462 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: adaptive_response_notable_event
+short_description: Manage Splunk Enterprise Security Notable Event Adaptive Responses
+description:
+ - This module allows for creation, deletion, and modification of Splunk
+ Enterprise Security Notable Event Adaptive Responses that are associated
+ with a correlation search
+version_added: "1.0.0"
+deprecated:
+ alternative: splunk_adaptive_response_notable_events
+ why: Newer and updated modules released with more functionality.
+ removed_at_date: '2024-09-01'
+options:
+ name:
+ description:
+ - Name of notable event
+ required: true
+ type: str
+ correlation_search_name:
+ description:
+ - Name of correlation search to associate this notable event adaptive response with
+ required: true
+ type: str
+ description:
+ description:
+ - Description of the notable event, this will populate the description field for the web console
+ required: true
+ type: str
+ state:
+ description:
+ - Add or remove a data source.
+ required: true
+ choices: [ "present", "absent" ]
+ type: str
+ security_domain:
+ description:
+ - Splunk Security Domain
+ type: str
+ required: False
+ choices:
+ - "access"
+ - "endpoint"
+ - "network"
+ - "threat"
+ - "identity"
+ - "audit"
+ default: "threat"
+ severity:
+ description:
+ - Severity rating
+ type: str
+ required: False
+ choices:
+ - "informational"
+ - "low"
+ - "medium"
+ - "high"
+ - "critical"
+ - "unknown"
+ default: "high"
+ default_owner:
+ description:
+ - Default owner of the notable event, if unset it will default to Splunk System Defaults
+ type: str
+ required: False
+ default_status:
+ description:
+ - Default status of the notable event, if unset it will default to Splunk System Defaults
+ type: str
+ required: False
+ choices:
+ - "unassigned"
+ - "new"
+ - "in progress"
+ - "pending"
+ - "resolved"
+ - "closed"
+ drill_down_name:
+ description:
+ - Name for drill down search, Supports variable substitution with fields from the matching event.
+ type: str
+ required: False
+ drill_down_search:
+ description:
+ - Drill down search, Supports variable substitution with fields from the matching event.
+ type: str
+ required: False
+ drill_down_earliest_offset:
+ description:
+ - Set the amount of time before the triggering event to search for related
+ events. For example, 2h. Use \"$info_min_time$\" to set the drill-down time
+ to match the earliest time of the search
+ type: str
+ required: False
+ default: \"$info_min_time$\"
+ drill_down_latest_offset:
+ description:
+ - Set the amount of time after the triggering event to search for related
+ events. For example, 1m. Use \"$info_max_time$\" to set the drill-down
+ time to match the latest time of the search
+ type: str
+ required: False
+ default: \"$info_max_time$\"
+ investigation_profiles:
+ description:
+      - Investigation profile to associate the notable event with.
+ type: str
+ required: False
+ next_steps:
+ description:
+ - List of adaptive responses that should be run next
+ - Describe next steps and response actions that an analyst could take to address this threat.
+ type: list
+ elements: str
+ required: False
+ recommended_actions:
+ description:
+ - List of adaptive responses that are recommended to be run next
+ - Identifying Recommended Adaptive Responses will highlight those actions
+ for the analyst when looking at the list of response actions available,
+ making it easier to find them among the longer list of available actions.
+ type: list
+ elements: str
+ required: False
+ asset_extraction:
+ description:
+ - list of assets to extract, select any one or many of the available choices
+ - defaults to all available choices
+ type: list
+ elements: str
+ choices:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ default:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ required: False
+ identity_extraction:
+ description:
+ - list of identity fields to extract, select any one or many of the available choices
+ - defaults to all available choices
+ type: list
+ elements: str
+ choices:
+ - user
+ - src_user
+ default:
+ - user
+ - src_user
+ required: False
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
+
+EXAMPLES = """
+- name: Example of using splunk.es.adaptive_response_notable_event module
+ splunk.es.adaptive_response_notable_event:
+ name: "Example notable event from Ansible"
+ correlation_search_name: "Example Correlation Search From Ansible"
+ description: "Example notable event from Ansible, description."
+ state: "present"
+ next_steps:
+ - ping
+ - nslookup
+ recommended_actions:
+ - script
+ - ansiblesecurityautomation
+"""
+
+import json
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
+ utils,
+)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+)
+
+
def main():
    """Ansible module entry point.

    Attaches, updates, or (nominally) removes a notable event adaptive
    response action on an existing Splunk Enterprise Security correlation
    search. Exits via ``module.exit_json``/``module.fail_json``; never
    returns.
    """

    argspec = dict(
        name=dict(required=True, type="str"),
        correlation_search_name=dict(required=True, type="str"),
        description=dict(required=True, type="str"),
        state=dict(choices=["present", "absent"], required=True),
        security_domain=dict(
            choices=[
                "access",
                "endpoint",
                "network",
                "threat",
                "identity",
                "audit",
            ],
            required=False,
            default="threat",
        ),
        severity=dict(
            choices=[
                "informational",
                "low",
                "medium",
                "high",
                "critical",
                "unknown",
            ],
            required=False,
            default="high",
        ),
        default_owner=dict(required=False, type="str"),
        default_status=dict(
            choices=[
                "unassigned",
                "new",
                "in progress",
                "pending",
                "resolved",
                "closed",
            ],
            required=False,
        ),
        drill_down_name=dict(required=False, type="str"),
        drill_down_search=dict(required=False, type="str"),
        drill_down_earliest_offset=dict(
            required=False, type="str", default="$info_min_time$"
        ),
        drill_down_latest_offset=dict(
            required=False, type="str", default="$info_max_time$"
        ),
        investigation_profiles=dict(required=False, type="str"),
        next_steps=dict(
            required=False, type="list", elements="str", default=[]
        ),
        recommended_actions=dict(
            required=False, type="list", elements="str", default=[]
        ),
        asset_extraction=dict(
            required=False,
            type="list",
            elements="str",
            default=["src", "dest", "dvc", "orig_host"],
            choices=["src", "dest", "dvc", "orig_host"],
        ),
        identity_extraction=dict(
            required=False,
            type="list",
            elements="str",
            default=["user", "src_user"],
            choices=["user", "src_user"],
        ),
    )

    module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)

    splunk_request = SplunkRequest(
        module,
        override=False,
        headers={"Content-Type": "application/x-www-form-urlencoded"},
        not_rest_data_keys=["state"],
    )

    # The notable action is configured on the correlation search itself, so
    # fetch the saved search first; an empty result means it doesn't exist.
    query_dict = splunk_request.get_by_path(
        "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
            quote_plus(module.params["correlation_search_name"])
        )
    )

    # Have to custom craft the data here because they overload the saved
    # searches endpoint in the rest api and we want to hide the nuance from
    # the user.
    request_post_data = {}

    # FIXME need to figure out how to properly support these, the possible
    # values appear to be dynamically created based on what the search is
    # indexing
    # request_post_data['action.notable.param.extract_assets'] = '["src","dest","dvc","orig_host"]'
    # request_post_data['action.notable.param.extract_identities'] = ["src_user","user"]
    if module.params["next_steps"]:
        # Each next step is encoded as [[action|<name>]], one per line.
        next_steps = "\n".join(
            "[[action|{0}]]".format(step)
            for step in module.params["next_steps"]
        )
        # NOTE: version:1 appears to be hard coded when you create this via
        # the splunk web UI but I don't know what it is/means because
        # there's no docs on it
        request_post_data["action.notable.param.next_steps"] = json.dumps(
            {"version": 1, "data": next_steps}
        )

    if module.params["recommended_actions"]:
        # Splunk expects a comma-separated string; join handles the
        # single-element case too.
        request_post_data[
            "action.notable.param.recommended_actions"
        ] = ",".join(module.params["recommended_actions"])

    request_post_data["action.notable.param.rule_description"] = module.params[
        "description"
    ]
    request_post_data["action.notable.param.rule_title"] = module.params[
        "name"
    ]
    request_post_data["action.notable.param.security_domain"] = module.params[
        "security_domain"
    ]
    request_post_data["action.notable.param.severity"] = module.params[
        "severity"
    ]
    request_post_data["action.notable.param.asset_extraction"] = module.params[
        "asset_extraction"
    ]
    request_post_data[
        "action.notable.param.identity_extraction"
    ] = module.params["identity_extraction"]

    # NOTE: this field appears to be hard coded when you create this via the
    # splunk web UI but I don't know what it is/means because there's no
    # docs on it
    request_post_data["action.notable.param.verbose"] = "0"

    if module.params["default_owner"]:
        request_post_data[
            "action.notable.param.default_owner"
        ] = module.params["default_owner"]

    if module.params["default_status"]:
        request_post_data[
            "action.notable.param.default_status"
        ] = module.params["default_status"]

    request_post_data = utils.remove_empties(request_post_data)

    if query_dict:
        request_post_data["search"] = query_dict["entry"][0]["content"][
            "search"
        ]
        if "actions" in query_dict["entry"][0]["content"]:
            existing_actions = query_dict["entry"][0]["content"]["actions"]
            if existing_actions == "notable":
                # "notable" is already the only action; nothing to append.
                pass
            elif (
                len(existing_actions.split(",")) > 0
                and "notable" not in existing_actions
            ):
                # Preserve the other configured actions and add ours.
                request_post_data["actions"] = existing_actions + ", notable"
        else:
            request_post_data["actions"] = "notable"
    else:
        # BUGFIX: the original message contained an unfilled "{0}"
        # placeholder; include the search name the user asked for.
        module.fail_json(
            msg="Unable to find correlation search: {0}".format(
                module.params["correlation_search_name"]
            ),
            splunk_data=query_dict,
        )

    if module.params["state"] == "present":
        # Only POST when at least one field differs from what Splunk has.
        needs_change = False
        for arg in request_post_data:
            if arg in query_dict["entry"][0]["content"]:
                if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
                    request_post_data[arg]
                ):
                    needs_change = True
        if not needs_change:
            module.exit_json(
                changed=False, msg="Nothing to do.", splunk_data=query_dict
            )
        if module.check_mode:
            module.exit_json(
                changed=True,
                msg="A change would have been made if not in check mode.",
                splunk_data=query_dict,
            )
        splunk_data = splunk_request.create_update(
            "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
                quote_plus(module.params["correlation_search_name"])
            ),
            data=urlencode(request_post_data),
        )
        module.exit_json(
            changed=True,
            msg="{0} updated.".format(
                module.params["correlation_search_name"]
            ),
            splunk_data=splunk_data,
        )

    if module.params["state"] == "absent":
        # FIXME - need to figure out how to clear the action.notable.param
        # fields from the api endpoint. The original code here exited with
        # an unassigned `splunk_data` variable (NameError at runtime) and
        # was followed by unreachable statements that also used unassigned
        # names; report the fetched search data instead until real removal
        # is implemented. NOTE(review): no delete request is sent here —
        # confirm intended behavior with the Splunk ES REST API.
        module.exit_json(
            changed=True,
            msg="Deleted {0}.".format(module.params["name"]),
            splunk_data=query_dict,
        )

    module.exit_json(
        changed=False, msg="Nothing to do.", splunk_data=query_dict
    )


if __name__ == "__main__":
    main()
diff --git a/ansible_collections/splunk/es/plugins/modules/correlation_search.py b/ansible_collections/splunk/es/plugins/modules/correlation_search.py
new file mode 100644
index 000000000..9c865507b
--- /dev/null
+++ b/ansible_collections/splunk/es/plugins/modules/correlation_search.py
@@ -0,0 +1,376 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: correlation_search
+short_description: Manage Splunk Enterprise Security Correlation Searches
+description:
+ - This module allows for creation, deletion, and modification of Splunk Enterprise Security Correlation Searches
+version_added: "1.0.0"
+deprecated:
+ alternative: splunk_correlation_searches
+ why: Newer and updated modules released with more functionality.
+ removed_at_date: '2024-09-01'
+options:
+ name:
+ description:
+      - Name of correlation search
+ required: True
+ type: str
+ description:
+ description:
+      - Description of the correlation search, this will populate the description field for the web console
+ required: True
+ type: str
+ state:
+ description:
+      - Add, remove, enable, or disable a correlation search.
+ required: True
+ choices: [ "present", "absent", "enabled", "disabled" ]
+ type: str
+ search:
+ description:
+ - SPL search string
+ type: str
+ required: True
+ app:
+ description:
+      - Splunk app to associate the correlation search with
+ type: str
+ required: False
+ default: "SplunkEnterpriseSecuritySuite"
+ ui_dispatch_context:
+ description:
+ - Set an app to use for links such as the drill-down search in a notable
+ event or links in an email adaptive response action. If None, uses the
+ Application Context.
+ type: str
+ required: False
+ time_earliest:
+ description:
+ - Earliest time using relative time modifiers.
+ type: str
+ required: False
+ default: "-24h"
+ time_latest:
+ description:
+ - Latest time using relative time modifiers.
+ type: str
+ required: False
+ default: "now"
+ cron_schedule:
+ description:
+ - Enter a cron-style schedule.
+ - For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM).
+ - Real-time searches use a default schedule of C('*/5 * * * *').
+ type: str
+ required: False
+ default: "*/5 * * * *"
+ scheduling:
+ description:
+ - Controls the way the scheduler computes the next execution time of a scheduled search.
+ - >
+ Learn more:
+ https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling
+ type: str
+ required: False
+ default: "real-time"
+ choices:
+ - "real-time"
+ - "continuous"
+ schedule_window:
+ description:
+ - Let report run at any time within a window that opens at its scheduled run time,
+ to improve efficiency when there are many concurrently scheduled reports.
+ The "auto" setting automatically determines the best window width for the report.
+ type: str
+ required: False
+ default: "0"
+ schedule_priority:
+ description:
+ - Raise the scheduling priority of a report. Set to "Higher" to prioritize
+ it above other searches of the same scheduling mode, or "Highest" to
+ prioritize it above other searches regardless of mode. Use with discretion.
+ type: str
+ required: False
+ default: "Default"
+ choices:
+ - "Default"
+ - "Higher"
+ - "Highest"
+ trigger_alert_when:
+ description:
+ - Raise the scheduling priority of a report. Set to "Higher" to prioritize
+ it above other searches of the same scheduling mode, or "Highest" to
+ prioritize it above other searches regardless of mode. Use with discretion.
+ type: str
+ required: False
+ default: "number of events"
+ choices:
+ - "number of events"
+ - "number of results"
+ - "number of hosts"
+ - "number of sources"
+ trigger_alert_when_condition:
+ description:
+ - Conditional to pass to C(trigger_alert_when)
+ type: str
+ required: False
+ default: "greater than"
+ choices:
+ - "greater than"
+ - "less than"
+ - "equal to"
+ - "not equal to"
+ - "drops by"
+ - "rises by"
+ trigger_alert_when_value:
+ description:
+ - Value to pass to C(trigger_alert_when)
+ type: str
+ required: False
+ default: "10"
+ throttle_window_duration:
+ description:
+ - "How much time to ignore other events that match the field values specified in Fields to group by."
+ type: str
+ required: False
+ throttle_fields_to_group_by:
+ description:
+ - "Type the fields to consider for matching events for throttling."
+ type: str
+ required: False
+ suppress_alerts:
+ description:
+ - "To suppress alerts from this correlation search or not"
+ type: bool
+ required: False
+ default: False
+notes:
+ - >
+ The following options are not yet supported:
+ throttle_window_duration, throttle_fields_to_group_by, and adaptive_response_actions
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
+
+EXAMPLES = """
+- name: Example of creating a correlation search with splunk.es.correlation_search
+  splunk.es.correlation_search:
+    name: "Example Correlation Search From Ansible"
+    description: "Example Correlation Search From Ansible, description."
+ search: 'source="/var/log/snort.log"'
+ state: "present"
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+
+from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
+ utils,
+)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+)
+
+
def main():
    """Ansible module entry point.

    Creates, updates, enables/disables, or deletes a Splunk Enterprise
    Security correlation search (a specially-decorated saved search).
    Exits via ``module.exit_json``/``module.fail_json``; never returns.
    """
    argspec = dict(
        name=dict(required=True, type="str"),
        description=dict(required=True, type="str"),
        state=dict(
            choices=["present", "absent", "enabled", "disabled"], required=True
        ),
        search=dict(required=True, type="str"),
        app=dict(
            type="str", required=False, default="SplunkEnterpriseSecuritySuite"
        ),
        ui_dispatch_context=dict(type="str", required=False),
        time_earliest=dict(type="str", required=False, default="-24h"),
        time_latest=dict(type="str", required=False, default="now"),
        cron_schedule=dict(type="str", required=False, default="*/5 * * * *"),
        scheduling=dict(
            type="str",
            required=False,
            default="real-time",
            choices=["real-time", "continuous"],
        ),
        schedule_window=dict(type="str", required=False, default="0"),
        schedule_priority=dict(
            type="str",
            required=False,
            default="Default",
            choices=["Default", "Higher", "Highest"],
        ),
        trigger_alert_when=dict(
            type="str",
            required=False,
            default="number of events",
            choices=[
                "number of events",
                "number of results",
                "number of hosts",
                "number of sources",
            ],
        ),
        trigger_alert_when_condition=dict(
            type="str",
            required=False,
            default="greater than",
            choices=[
                "greater than",
                "less than",
                "equal to",
                "not equal to",
                "drops by",
                "rises by",
            ],
        ),
        trigger_alert_when_value=dict(
            type="str", required=False, default="10"
        ),
        throttle_window_duration=dict(type="str", required=False),
        throttle_fields_to_group_by=dict(type="str", required=False),
        suppress_alerts=dict(type="bool", required=False, default=False),
    )

    module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)

    # "present"/"enabled" leave the search active; "absent"/"disabled" mark
    # it disabled in the payload (absent is handled separately below).
    module_disabled_state = module.params["state"] not in [
        "present",
        "enabled",
    ]

    splunk_request = SplunkRequest(
        module,
        override=False,
        headers={"Content-Type": "application/x-www-form-urlencoded"},
        not_rest_data_keys=["state"],
    )

    try:
        query_dict = splunk_request.get_by_path(
            "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
                quote_plus(module.params["name"])
            )
        )
    except HTTPError:
        # A 404 just means the correlation search doesn't exist yet.
        query_dict = {}

    # Have to custom craft the data here because they overload the saved
    # searches endpoint in the rest api and we want to hide the nuance from
    # the user.
    request_post_data = {}
    request_post_data["name"] = module.params["name"]
    request_post_data["action.correlationsearch.enabled"] = "1"
    request_post_data["is_scheduled"] = True
    request_post_data["dispatch.rt_backfill"] = True
    request_post_data["action.correlationsearch.label"] = module.params["name"]
    request_post_data["description"] = module.params["description"]
    request_post_data["search"] = module.params["search"]
    request_post_data["request.ui_dispatch_app"] = module.params["app"]
    if module.params["ui_dispatch_context"]:
        request_post_data["request.ui_dispatch_context"] = module.params[
            "ui_dispatch_context"
        ]
    request_post_data["dispatch.earliest_time"] = module.params[
        "time_earliest"
    ]
    request_post_data["dispatch.latest_time"] = module.params["time_latest"]
    request_post_data["cron_schedule"] = module.params["cron_schedule"]
    # "real-time" scheduling maps to Splunk's realtime_schedule flag.
    request_post_data["realtime_schedule"] = (
        module.params["scheduling"] == "real-time"
    )
    request_post_data["schedule_window"] = module.params["schedule_window"]
    # Splunk expects the priority in lower case ("default"/"higher"/"highest").
    request_post_data["schedule_priority"] = module.params[
        "schedule_priority"
    ].lower()
    request_post_data["alert_type"] = module.params["trigger_alert_when"]
    request_post_data["alert_comparator"] = module.params[
        "trigger_alert_when_condition"
    ]
    request_post_data["alert_threshold"] = module.params[
        "trigger_alert_when_value"
    ]
    request_post_data["alert.suppress"] = module.params["suppress_alerts"]
    request_post_data["disabled"] = module_disabled_state

    request_post_data = utils.remove_empties(request_post_data)

    if module.params["state"] in ["present", "enabled", "disabled"]:
        if query_dict:
            # Compare field-by-field against the existing saved search so we
            # only POST (and report changed) when something actually differs.
            needs_change = False
            for arg in request_post_data:
                if arg in query_dict["entry"][0]["content"]:
                    if to_text(
                        query_dict["entry"][0]["content"][arg]
                    ) != to_text(request_post_data[arg]):
                        needs_change = True
            if not needs_change:
                module.exit_json(
                    changed=False, msg="Nothing to do.", splunk_data=query_dict
                )
            if module.check_mode and needs_change:
                module.exit_json(
                    changed=True,
                    msg="A change would have been made if not in check mode.",
                    splunk_data=query_dict,
                )
            if needs_change:
                # FIXME - need to find a reasonable way to deal with action.correlationsearch.enabled
                # If "name" is present, splunk assumes we're trying to create
                # a new search with the same name.
                del request_post_data["name"]
                splunk_data = splunk_request.create_update(
                    "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
                        quote_plus(module.params["name"])
                    ),
                    data=urlencode(request_post_data),
                )
                # BUGFIX: the original message contained an unfilled "{0}".
                module.exit_json(
                    changed=True,
                    msg="{0} updated.".format(module.params["name"]),
                    splunk_data=splunk_data,
                )
        else:
            # Create it
            splunk_data = splunk_request.create_update(
                "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches",
                data=urlencode(request_post_data),
            )
            # BUGFIX: the original message contained an unfilled "{0}".
            module.exit_json(
                changed=True,
                msg="{0} created.".format(module.params["name"]),
                splunk_data=splunk_data,
            )

    elif module.params["state"] == "absent":
        if query_dict:
            splunk_data = splunk_request.delete_by_path(
                "services/saved/searches/{0}".format(
                    quote_plus(module.params["name"])
                )
            )
            module.exit_json(
                changed=True,
                msg="Deleted {0}.".format(module.params["name"]),
                splunk_data=splunk_data,
            )

    module.exit_json(
        changed=False, msg="Nothing to do.", splunk_data=query_dict
    )


if __name__ == "__main__":
    main()
diff --git a/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py b/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py
new file mode 100644
index 000000000..0ab756989
--- /dev/null
+++ b/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py
@@ -0,0 +1,80 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: correlation_search_info
+short_description: Manage Splunk Enterprise Security Correlation Searches
+description:
+ - This module allows for the query of Splunk Enterprise Security Correlation Searches
+version_added: "1.0.0"
+options:
+ name:
+ description:
+      - Name of correlation search
+ required: false
+ type: str
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
+
+EXAMPLES = """
+- name: Example usage of splunk.es.correlation_search_info
+ splunk.es.correlation_search_info:
+ name: "Name of correlation search"
+ register: scorrelation_search_info
+
+- name: debug display information gathered
+ debug:
+ var: scorrelation_search_info
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.six.moves.urllib.parse import quote_plus
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+)
+
+
def main():
    """Ansible module entry point.

    Queries Splunk Enterprise Security correlation searches: a single one
    when ``name`` is supplied, otherwise all of them. Exits via
    ``module.exit_json``; never returns.
    """

    argspec = dict(name=dict(required=False, type="str"))

    module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)

    splunk_request = SplunkRequest(
        module,
        headers={"Content-Type": "application/json"},
    )

    if module.params["name"]:
        try:
            query_dict = splunk_request.get_by_path(
                "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
                    quote_plus(module.params["name"])
                )
            )
        except HTTPError:
            # A 404 just means the named correlation search doesn't exist;
            # report an empty result rather than failing.
            query_dict = {}
    else:
        # No name given: list every saved search in the ES app.
        query_dict = splunk_request.get_by_path(
            "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
        )

    module.exit_json(changed=False, splunk_correlation_search_info=query_dict)


if __name__ == "__main__":
    main()
diff --git a/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py b/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py
new file mode 100644
index 000000000..080d23d3b
--- /dev/null
+++ b/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py
@@ -0,0 +1,264 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: data_input_monitor
+short_description: Manage Splunk Data Inputs of type Monitor
+description:
+ - This module allows for addition or deletion of File and Directory Monitor Data Inputs in Splunk.
+version_added: "1.0.0"
+deprecated:
+ alternative: splunk_data_inputs_monitor
+ why: Newer and updated modules released with more functionality.
+ removed_at_date: '2024-09-01'
+options:
+ name:
+ description:
+ - The file or directory path to monitor on the system.
+ required: True
+ type: str
+ state:
+ description:
+ - Add or remove a data source.
+ required: True
+ choices:
+ - "present"
+ - "absent"
+ type: str
+ blacklist:
+ description:
+ - Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.
+ required: False
+ type: str
+ check_index:
+ description:
+ - If set to C(True), the index value is checked to ensure that it is the name of a valid index.
+ required: False
+ type: bool
+ default: False
+ check_path:
+ description:
+ - If set to C(True), the name value is checked to ensure that it exists.
+ required: False
+ type: bool
+ crc_salt:
+ description:
+ - A string that modifies the file tracking identity for files in this input.
+ The magic value <SOURCE> invokes special behavior (see admin documentation).
+ required: False
+ type: str
+ disabled:
+ description:
+ - Indicates if input monitoring is disabled.
+ required: False
+ default: False
+ type: bool
+ followTail:
+ description:
+ - If set to C(True), files that are seen for the first time are read from the end.
+ required: False
+ type: bool
+ default: False
+ host:
+ description:
+ - The value to populate in the host field for events from this data input.
+ required: False
+ type: str
+ host_regex:
+ description:
+ - Specify a regular expression for a file path. If the path for a file
+ matches this regular expression, the captured value is used to populate
+ the host field for events from this data input. The regular expression
+ must have one capture group.
+ required: False
+ type: str
+ host_segment:
+ description:
+ - Use the specified slash-separated segment of the filepath as the host field value.
+ required: False
+ type: int
+ ignore_older_than:
+ description:
+ - Specify a time value. If the modification time of a file being monitored
+ falls outside of this rolling time window, the file is no longer being monitored.
+ required: False
+ type: str
+ index:
+ description:
+ - Which index events from this input should be stored in. Defaults to default.
+ required: False
+ type: str
+ recursive:
+ description:
+ - Setting this to False prevents monitoring of any subdirectories encountered within this data input.
+ required: False
+ type: bool
+ default: False
+ rename_source:
+ description:
+ - The value to populate in the source field for events from this data input.
+ The same source should not be used for multiple data inputs.
+ required: False
+ type: str
+ sourcetype:
+ description:
+ - The value to populate in the sourcetype field for incoming events.
+ required: False
+ type: str
+ time_before_close:
+ description:
+ - When Splunk software reaches the end of a file that is being read, the
+ file is kept open for a minimum of the number of seconds specified in
+ this value. After this period has elapsed, the file is checked again for
+ more data.
+ required: False
+ type: int
+ whitelist:
+ description:
+ - Specify a regular expression for a file path. Only file paths that match this regular expression are indexed.
+ required: False
+ type: str
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+
+EXAMPLES = """
+- name: Example adding data input monitor with splunk.es.data_input_monitor
+ splunk.es.data_input_monitor:
+ name: "/var/log/example.log"
+ state: "present"
+ recursive: True
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.parse import quote_plus
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
+ utils,
+)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+)
+
+
+def main():
+
+ argspec = dict(
+ name=dict(required=True, type="str"),
+ state=dict(choices=["present", "absent"], required=True),
+ blacklist=dict(required=False, type="str", default=None),
+ check_index=dict(required=False, type="bool", default=False),
+ check_path=dict(required=False, type="bool", default=None),
+ crc_salt=dict(required=False, type="str", default=None),
+ disabled=dict(required=False, type="bool", default=False),
+ followTail=dict(required=False, type="bool", default=False),
+ host=dict(required=False, type="str", default=None),
+ host_segment=dict(required=False, type="int", default=None),
+ host_regex=dict(required=False, type="str", default=None),
+ ignore_older_than=dict(required=False, type="str", default=None),
+ index=dict(required=False, type="str", default=None),
+ recursive=dict(required=False, type="bool", default=False),
+ rename_source=dict(required=False, type="str", default=None),
+ sourcetype=dict(required=False, type="str", default=None),
+ time_before_close=dict(required=False, type="int", default=None),
+ whitelist=dict(required=False, type="str", default=None),
+ )
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ # map of keys for the splunk REST API that aren't pythonic so we have to
+ # handle the substitutes
+ keymap = {
+ "check_index": "check-index",
+ "check_path": "check-path",
+ "crc_salt": "crc-salt",
+ "ignore_older_than": "ignore-older-than",
+ "rename_source": "rename-source",
+ "time_before_close": "time-before-close",
+ }
+
+ splunk_request = SplunkRequest(
+ module,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ keymap=keymap,
+ not_rest_data_keys=["state"],
+ )
+ # This is where the splunk_* args are processed
+ request_data = splunk_request.get_data()
+
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/search/data/inputs/monitor/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ query_dict = utils.remove_empties(query_dict)
+
+ if module.params["state"] == "present":
+ if query_dict:
+ needs_change = False
+ for arg in request_data:
+ if arg in query_dict["entry"][0]["content"]:
+ if to_text(
+ query_dict["entry"][0]["content"][arg]
+ ) != to_text(request_data[arg]):
+ needs_change = True
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/monitor/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ module.exit_json(
+ changed=True, msg="{0} updated.", splunk_data=splunk_data
+ )
+ else:
+ # Create it
+ _data = splunk_request.get_data()
+ _data["name"] = module.params["name"]
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/monitor",
+ data=_data,
+ )
+ module.exit_json(
+ changed=True, msg="{0} created.", splunk_data=splunk_data
+ )
+
+ if module.params["state"] == "absent":
+ if query_dict:
+ splunk_data = splunk_request.delete_by_path(
+ "servicesNS/nobody/search/data/inputs/monitor/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ module.exit_json(
+ changed=True,
+ msg="Deleted {0}.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ansible_collections/splunk/es/plugins/modules/data_input_network.py b/ansible_collections/splunk/es/plugins/modules/data_input_network.py
new file mode 100644
index 000000000..5771eb9cc
--- /dev/null
+++ b/ansible_collections/splunk/es/plugins/modules/data_input_network.py
@@ -0,0 +1,276 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: data_input_network
+short_description: Manage Splunk Data Inputs of type TCP or UDP
+description:
+ - This module allows for addition or deletion of TCP and UDP Data Inputs in Splunk.
+version_added: "1.0.0"
+deprecated:
+ alternative: splunk_data_inputs_network
+ why: Newer and updated modules released with more functionality.
+ removed_at_date: '2024-09-01'
+options:
+ protocol:
+ description:
+ - Choose between tcp or udp
+ required: True
+ choices:
+ - 'tcp'
+ - 'udp'
+ type: str
+ connection_host:
+ description:
+ - Set the host for the remote server that is sending data.
+ - C(ip) sets the host to the IP address of the remote server sending data.
+ - C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data.
+ - C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
+ default: "ip"
+ required: False
+ type: str
+ choices:
+ - "ip"
+ - "dns"
+ - "none"
+ state:
+ description:
+ - Enable, disable, create, or destroy
+ choices:
+ - "present"
+ - "absent"
+ - "enabled"
+ - "disable"
+ required: False
+ default: "present"
+ type: str
+ datatype:
+ description: >
+ Forwarders can transmit three types of data: raw, unparsed, or parsed.
+ C(cooked) data refers to parsed and unparsed formats.
+ choices:
+ - "cooked"
+ - "raw"
+ default: "raw"
+ required: False
+ type: str
+ host:
+ description:
+ - Host from which the indexer gets data.
+ required: False
+ type: str
+ index:
+ description:
+ - Default index to store generated events.
+ type: str
+ name:
+ description:
+ - The input port which receives raw data.
+ required: True
+ type: str
+ queue:
+ description:
+ - Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
+ - Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more
+ information about props.conf and rules for timestamping and linebreaking, refer to props.conf and
+ the online documentation at "Monitor files and directories with inputs.conf"
+ - Set queue to indexQueue to send your data directly into the index.
+ choices:
+ - "parsingQueue"
+ - "indexQueue"
+ type: str
+ required: False
+ default: "parsingQueue"
+ rawTcpDoneTimeout:
+ description:
+ - Specifies in seconds the timeout value for adding a Done-key.
+ - If a connection over the port specified by name remains idle after receiving data for specified
+ number of seconds, it adds a Done-key. This implies the last event is completely received.
+ default: 10
+ type: int
+ required: False
+ restrictToHost:
+ description:
+ - Allows for restricting this input to only accept data from the host specified here.
+ required: False
+ type: str
+ ssl:
+ description:
+ - Enable or disable ssl for the data stream
+ required: False
+ type: bool
+ source:
+ description:
+ - Sets the source key/field for events from this input. Defaults to the input file path.
+ - >
+ Sets the source key initial value. The key is used during parsing/indexing, in particular to set
+ the source field during indexing. It is also the source field used at search time. As a convenience,
+ the chosen string is prepended with 'source::'.
+ - >
+ Note: Overriding the source key is generally not recommended. Typically, the input layer provides a
+ more accurate string to aid in problem analysis and investigation, accurately recording the file from
+ which the data was retrieved. Consider use of source types, tagging, and search wildcards before
+ overriding this value.
+ type: str
+ sourcetype:
+ description:
+ - Set the source type for events from this input.
+ - '"sourcetype=" is automatically prepended to <string>.'
+ - Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).
+ type: str
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+
+EXAMPLES = """
+- name: Example adding data input network with splunk.es.data_input_network
+ splunk.es.data_input_network:
+ name: "8099"
+ protocol: "tcp"
+ state: "present"
+"""
+
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.parse import quote_plus
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+)
+
+
+def main():
+
+ argspec = dict(
+ state=dict(
+ required=False,
+ choices=["present", "absent", "enabled", "disable"],
+ default="present",
+ type="str",
+ ),
+ connection_host=dict(
+ required=False,
+ choices=["ip", "dns", "none"],
+ default="ip",
+ type="str",
+ ),
+ host=dict(required=False, type="str", default=None),
+ index=dict(required=False, type="str", default=None),
+ name=dict(required=True, type="str"),
+ protocol=dict(required=True, type="str", choices=["tcp", "udp"]),
+ queue=dict(
+ required=False,
+ type="str",
+ choices=["parsingQueue", "indexQueue"],
+ default="parsingQueue",
+ ),
+ rawTcpDoneTimeout=dict(required=False, type="int", default=10),
+ restrictToHost=dict(required=False, type="str", default=None),
+ ssl=dict(required=False, type="bool", default=None),
+ source=dict(required=False, type="str", default=None),
+ sourcetype=dict(required=False, type="str", default=None),
+ datatype=dict(
+ required=False, choices=["cooked", "raw"], default="raw"
+ ),
+ )
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ splunk_request = SplunkRequest(
+ module,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ not_rest_data_keys=["state", "datatype", "protocol"],
+ )
+ # This is where the splunk_* args are processed
+ request_data = splunk_request.get_data()
+
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ quote_plus(module.params["name"]),
+ )
+ )
+
+ if module.params["state"] in ["present", "enabled", "disabled"]:
+ _data = splunk_request.get_data()
+ if module.params["state"] in ["present", "enabled"]:
+ _data["disabled"] = False
+ else:
+ _data["disabled"] = True
+ if query_dict:
+ needs_change = False
+ for arg in request_data:
+ if arg in query_dict["entry"][0]["content"]:
+ if to_text(
+ query_dict["entry"][0]["content"][arg]
+ ) != to_text(request_data[arg]):
+ needs_change = True
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ quote_plus(module.params["name"]),
+ ),
+ data=_data,
+ )
+ if module.params["state"] in ["present", "enabled"]:
+ module.exit_json(
+ changed=True, msg="{0} updated.", splunk_data=splunk_data
+ )
+ else:
+ module.exit_json(
+ changed=True, msg="{0} disabled.", splunk_data=splunk_data
+ )
+ else:
+ # Create it
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ ),
+ data=_data,
+ )
+ module.exit_json(
+ changed=True, msg="{0} created.", splunk_data=splunk_data
+ )
+ elif module.params["state"] == "absent":
+ if query_dict:
+ splunk_data = splunk_request.delete_by_path(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ quote_plus(module.params["name"]),
+ )
+ )
+ module.exit_json(
+ changed=True,
+ msg="Deleted {0}.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data={})
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py
new file mode 100644
index 000000000..29099424e
--- /dev/null
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py
@@ -0,0 +1,462 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: adaptive_response_notable_event
+short_description: Manage Splunk Enterprise Security Notable Event Adaptive Responses
+description:
+ - This module allows for creation, deletion, and modification of Splunk
+ Enterprise Security Notable Event Adaptive Responses that are associated
+ with a correlation search
+version_added: "1.0.0"
+deprecated:
+ alternative: splunk_adaptive_response_notable_events
+ why: Newer and updated modules released with more functionality.
+ removed_at_date: '2024-09-01'
+options:
+ name:
+ description:
+ - Name of notable event
+ required: true
+ type: str
+ correlation_search_name:
+ description:
+ - Name of correlation search to associate this notable event adaptive response with
+ required: true
+ type: str
+ description:
+ description:
+ - Description of the notable event, this will populate the description field for the web console
+ required: true
+ type: str
+ state:
+ description:
+ - Add or remove a data source.
+ required: true
+ choices: [ "present", "absent" ]
+ type: str
+ security_domain:
+ description:
+ - Splunk Security Domain
+ type: str
+ required: False
+ choices:
+ - "access"
+ - "endpoint"
+ - "network"
+ - "threat"
+ - "identity"
+ - "audit"
+ default: "threat"
+ severity:
+ description:
+ - Severity rating
+ type: str
+ required: False
+ choices:
+ - "informational"
+ - "low"
+ - "medium"
+ - "high"
+ - "critical"
+ - "unknown"
+ default: "high"
+ default_owner:
+ description:
+ - Default owner of the notable event, if unset it will default to Splunk System Defaults
+ type: str
+ required: False
+ default_status:
+ description:
+ - Default status of the notable event, if unset it will default to Splunk System Defaults
+ type: str
+ required: False
+ choices:
+ - "unassigned"
+ - "new"
+ - "in progress"
+ - "pending"
+ - "resolved"
+ - "closed"
+ drill_down_name:
+ description:
+ - Name for drill down search, Supports variable substitution with fields from the matching event.
+ type: str
+ required: False
+ drill_down_search:
+ description:
+ - Drill down search, Supports variable substitution with fields from the matching event.
+ type: str
+ required: False
+ drill_down_earliest_offset:
+ description:
+ - Set the amount of time before the triggering event to search for related
+ events. For example, 2h. Use \"$info_min_time$\" to set the drill-down time
+ to match the earliest time of the search
+ type: str
+ required: False
+ default: \"$info_min_time$\"
+ drill_down_latest_offset:
+ description:
+ - Set the amount of time after the triggering event to search for related
+ events. For example, 1m. Use \"$info_max_time$\" to set the drill-down
+ time to match the latest time of the search
+ type: str
+ required: False
+ default: \"$info_max_time$\"
+ investigation_profiles:
+ description:
+ - Investigation profile to associate the notable event with.
+ type: str
+ required: False
+ next_steps:
+ description:
+ - List of adaptive responses that should be run next
+ - Describe next steps and response actions that an analyst could take to address this threat.
+ type: list
+ elements: str
+ required: False
+ recommended_actions:
+ description:
+ - List of adaptive responses that are recommended to be run next
+ - Identifying Recommended Adaptive Responses will highlight those actions
+ for the analyst when looking at the list of response actions available,
+ making it easier to find them among the longer list of available actions.
+ type: list
+ elements: str
+ required: False
+ asset_extraction:
+ description:
+ - list of assets to extract, select any one or many of the available choices
+ - defaults to all available choices
+ type: list
+ elements: str
+ choices:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ default:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ required: False
+ identity_extraction:
+ description:
+ - list of identity fields to extract, select any one or many of the available choices
+ - defaults to all available choices
+ type: list
+ elements: str
+ choices:
+ - user
+ - src_user
+ default:
+ - user
+ - src_user
+ required: False
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
+
+EXAMPLES = """
+- name: Example of using splunk.es.adaptive_response_notable_event module
+ splunk.es.adaptive_response_notable_event:
+ name: "Example notable event from Ansible"
+ correlation_search_name: "Example Correlation Search From Ansible"
+ description: "Example notable event from Ansible, description."
+ state: "present"
+ next_steps:
+ - ping
+ - nslookup
+ recommended_actions:
+ - script
+ - ansiblesecurityautomation
+"""
+
+import json
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
+ utils,
+)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+)
+
+
+def main():
+
+ argspec = dict(
+ name=dict(required=True, type="str"),
+ correlation_search_name=dict(required=True, type="str"),
+ description=dict(required=True, type="str"),
+ state=dict(choices=["present", "absent"], required=True),
+ security_domain=dict(
+ choices=[
+ "access",
+ "endpoint",
+ "network",
+ "threat",
+ "identity",
+ "audit",
+ ],
+ required=False,
+ default="threat",
+ ),
+ severity=dict(
+ choices=[
+ "informational",
+ "low",
+ "medium",
+ "high",
+ "critical",
+ "unknown",
+ ],
+ required=False,
+ default="high",
+ ),
+ default_owner=dict(required=False, type="str"),
+ default_status=dict(
+ choices=[
+ "unassigned",
+ "new",
+ "in progress",
+ "pending",
+ "resolved",
+ "closed",
+ ],
+ required=False,
+ ),
+ drill_down_name=dict(required=False, type="str"),
+ drill_down_search=dict(required=False, type="str"),
+ drill_down_earliest_offset=dict(
+ required=False, type="str", default="$info_min_time$"
+ ),
+ drill_down_latest_offset=dict(
+ required=False, type="str", default="$info_max_time$"
+ ),
+ investigation_profiles=dict(required=False, type="str"),
+ next_steps=dict(
+ required=False, type="list", elements="str", default=[]
+ ),
+ recommended_actions=dict(
+ required=False, type="list", elements="str", default=[]
+ ),
+ asset_extraction=dict(
+ required=False,
+ type="list",
+ elements="str",
+ default=["src", "dest", "dvc", "orig_host"],
+ choices=["src", "dest", "dvc", "orig_host"],
+ ),
+ identity_extraction=dict(
+ required=False,
+ type="list",
+ elements="str",
+ default=["user", "src_user"],
+ choices=["user", "src_user"],
+ ),
+ )
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ splunk_request = SplunkRequest(
+ module,
+ override=False,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ not_rest_data_keys=["state"],
+ )
+
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["correlation_search_name"])
+ )
+ )
+
+ # Have to custom craft the data here because they overload the saved searches
+ # endpoint in the rest api and we want to hide the nuance from the user
+ request_post_data = {}
+
+ # FIXME need to figure out how to properly support these, the possible values appear to
+ # be dynamically created based on what the search is indexing
+ # request_post_data['action.notable.param.extract_assets'] = '[\"src\",\"dest\",\"dvc\",\"orig_host\"]'
+ # request_post_data['action.notable.param.extract_identities'] = [\"src_user\",\"user\"]
+ if module.params["next_steps"]:
+ if len(module.params["next_steps"]) == 1:
+ next_steps = "[[action|{0}]]".format(
+ module.params["next_steps"][0]
+ )
+ else:
+ next_steps = ""
+ for next_step in module.params["next_steps"]:
+ if next_steps:
+ next_steps += "\n[[action|{0}]]".format(next_step)
+ else:
+ next_steps = "[[action|{0}]]".format(next_step)
+
+ # NOTE: version:1 appears to be hard coded when you create this via the splunk web UI
+ # but I don't know what it is/means because there's no docs on it
+ next_steps_dict = {"version": 1, "data": next_steps}
+ request_post_data["action.notable.param.next_steps"] = json.dumps(
+ next_steps_dict
+ )
+
+ if module.params["recommended_actions"]:
+ if len(module.params["recommended_actions"]) == 1:
+ request_post_data[
+ "action.notable.param.recommended_actions"
+ ] = module.params["recommended_actions"][0]
+ else:
+ request_post_data[
+ "action.notable.param.recommended_actions"
+ ] = ",".join(module.params["recommended_actions"])
+
+ request_post_data["action.notable.param.rule_description"] = module.params[
+ "description"
+ ]
+ request_post_data["action.notable.param.rule_title"] = module.params[
+ "name"
+ ]
+ request_post_data["action.notable.param.security_domain"] = module.params[
+ "security_domain"
+ ]
+ request_post_data["action.notable.param.severity"] = module.params[
+ "severity"
+ ]
+ request_post_data["action.notable.param.asset_extraction"] = module.params[
+ "asset_extraction"
+ ]
+ request_post_data[
+ "action.notable.param.identity_extraction"
+ ] = module.params["identity_extraction"]
+
+ # NOTE: this field appears to be hard coded when you create this via the splunk web UI
+ # but I don't know what it is/means because there's no docs on it
+ request_post_data["action.notable.param.verbose"] = "0"
+
+ if module.params["default_owner"]:
+ request_post_data[
+ "action.notable.param.default_owner"
+ ] = module.params["default_owner"]
+
+ if module.params["default_status"]:
+ request_post_data[
+ "action.notable.param.default_status"
+ ] = module.params["default_status"]
+
+ request_post_data = utils.remove_empties(request_post_data)
+
+ if query_dict:
+ request_post_data["search"] = query_dict["entry"][0]["content"][
+ "search"
+ ]
+ if "actions" in query_dict["entry"][0]["content"]:
+ if query_dict["entry"][0]["content"]["actions"] == "notable":
+ pass
+ elif (
+ len(query_dict["entry"][0]["content"]["actions"].split(","))
+ > 0
+ and "notable"
+ not in query_dict["entry"][0]["content"]["actions"]
+ ):
+ request_post_data["actions"] = (
+ query_dict["entry"][0]["content"]["actions"] + ", notable"
+ )
+ else:
+ request_post_data["actions"] = "notable"
+ else:
+ module.fail_json(
+ msg="Unable to find correlation search: {0}",
+ splunk_data=query_dict,
+ )
+
+ if module.params["state"] == "present":
+ needs_change = False
+ for arg in request_post_data:
+ if arg in query_dict["entry"][0]["content"]:
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_post_data[arg]
+ ):
+ needs_change = True
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["correlation_search_name"])
+ ),
+ data=urlencode(request_post_data),
+ )
+ module.exit_json(
+ changed=True,
+ msg="{0} updated.".format(
+ module.params["correlation_search_name"]
+ ),
+ splunk_data=splunk_data,
+ )
+
+ if module.params["state"] == "absent":
+ # FIXME - need to figure out how to clear the action.notable.param fields from the api endpoint
+ module.exit_json(
+ changed=True,
+ msg="Deleted {0}.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+ for arg in request_post_data:
+ if arg in query_dict["entry"][0]["content"]:
+ needs_change = True
+ del query_dict["entry"][0]["content"][arg]
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["correlation_search_name"])
+ ),
+ data=urlencode(request_post_data),
+ )
+ module.exit_json(
+ changed=True,
+ msg="{0} updated.".format(
+ module.params["correlation_search_name"]
+ ),
+ splunk_data=splunk_data,
+ )
+
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py
new file mode 100644
index 000000000..fa680a511
--- /dev/null
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py
@@ -0,0 +1,512 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright 2022 Red Hat
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: splunk_adaptive_response_notable_events
+short_description: Manage Adaptive Responses notable events resource module
+description:
+ - This module allows for creation, deletion, and modification of Splunk
+ Enterprise Security Notable Event Adaptive Responses that are associated
+ with a correlation search
+ - Tested against Splunk Enterprise Server 8.2.3
+version_added: "2.1.0"
+options:
+ config:
+ description:
+      - Configure Notable Event Adaptive Responses associated with correlation searches
+ type: list
+ elements: dict
+ suboptions:
+ name:
+ description:
+ - Name of notable event
+ type: str
+ correlation_search_name:
+ description:
+ - Name of correlation search to associate this notable event adaptive response with
+ required: true
+ type: str
+ description:
+ description:
+ - Description of the notable event, this will populate the description field for the web console
+ type: str
+ security_domain:
+ description:
+ - Splunk Security Domain
+ type: str
+ choices:
+ - "access"
+ - "endpoint"
+ - "network"
+ - "threat"
+ - "identity"
+ - "audit"
+ default: "threat"
+ severity:
+ description:
+ - Severity rating
+ type: str
+ choices:
+ - "informational"
+ - "low"
+ - "medium"
+ - "high"
+ - "critical"
+ - "unknown"
+ default: "high"
+ default_owner:
+ description:
+ - Default owner of the notable event, if unset it will default to Splunk System Defaults
+ type: str
+ default_status:
+ description:
+ - Default status of the notable event, if unset it will default to Splunk System Defaults
+ type: str
+ choices:
+ - "unassigned"
+ - "new"
+ - "in progress"
+ - "pending"
+ - "resolved"
+ - "closed"
+ drilldown_name:
+ description:
+ - Name for drill down search, Supports variable substitution with fields from the matching event.
+ type: str
+ drilldown_search:
+ description:
+ - Drill down search, Supports variable substitution with fields from the matching event.
+ type: str
+ drilldown_earliest_offset:
+ description:
+ - Set the amount of time before the triggering event to search for related
+ events. For example, 2h. Use '$info_min_time$' to set the drill-down time
+ to match the earliest time of the search
+ type: str
+ default: '$info_min_time$'
+ drilldown_latest_offset:
+ description:
+ - Set the amount of time after the triggering event to search for related
+ events. For example, 1m. Use '$info_max_time$' to set the drill-down
+ time to match the latest time of the search
+ type: str
+ default: '$info_max_time$'
+ investigation_profiles:
+ description:
+ - Investigation profile to associate the notable event with.
+ type: list
+ elements: str
+ next_steps:
+ description:
+ - List of adaptive responses that should be run next
+ - Describe next steps and response actions that an analyst could take to address this threat.
+ type: list
+ elements: str
+ recommended_actions:
+ description:
+ - List of adaptive responses that are recommended to be run next
+ - Identifying Recommended Adaptive Responses will highlight those actions
+ for the analyst when looking at the list of response actions available,
+ making it easier to find them among the longer list of available actions.
+ type: list
+ elements: str
+ extract_artifacts:
+ description:
+ - Assets and identities to be extracted
+ type: dict
+ suboptions:
+ asset:
+ description:
+ - list of assets to extract, select any one or many of the available choices
+ - defaults to all available choices
+ type: list
+ elements: str
+ choices:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ file:
+ description:
+ - list of files to extract
+ type: list
+ elements: str
+ identity:
+ description:
+ - list of identity fields to extract, select any one or many of the available choices
+ - defaults to 'user' and 'src_user'
+ type: list
+ elements: str
+ choices:
+ - user
+ - src_user
+ - src_user_id
+ - user_id
+ - src_user_role
+ - user_role
+ - vendor_account
+ url:
+ description:
+ - list of URLs to extract
+ type: list
+ elements: str
+ running_config:
+ description:
+ - The module, by default, will connect to the remote device and retrieve the current
+ running-config to use as a base for comparing against the contents of source.
+ There are times when it is not desirable to have the task get the current running-config
+ for every task in a playbook. The I(running_config) argument allows the implementer
+ to pass in the configuration to use as the base config for comparison. This
+ value of this option should be the output received from device by executing
+ command.
+ type: str
+ state:
+ description:
+ - The state the configuration should be left in
+ type: str
+ choices:
+ - merged
+ - replaced
+ - deleted
+ - gathered
+ default: merged
+
+author: Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security>
+"""
+
+EXAMPLES = """
+# Using gathered
+# --------------
+
+- name: Gather adaptive response notable events config
+ splunk.es.splunk_adaptive_response_notable_events:
+ config:
+ - correlation_search_name: Ansible Test
+ - correlation_search_name: Ansible Test 2
+ state: gathered
+
+# RUN output:
+# -----------
+
+# "gathered": [
+# {
+# "correlation_search_name": "Ansible Test",
+# "description": "test notable event",
+# "drilldown_earliest_offset": "$info_min_time$",
+# "drilldown_latest_offset": "$info_max_time$",
+# "drilldown_name": "test_drill_name",
+# "drilldown_search": "test_drill",
+# "extract_artifacts": {
+# "asset": [
+# "src",
+# "dest",
+# "dvc",
+# "orig_host"
+# ],
+# "identity": [
+# "src_user",
+# "user",
+# "src_user_id",
+# "src_user_role",
+# "user_id",
+# "user_role",
+# "vendor_account"
+# ]
+# },
+# "investigation_profiles": [
+# "test profile 1",
+# "test profile 2",
+# "test profile 3"
+# ],
+# "next_steps": [
+# "makestreams",
+# "nbtstat",
+# "nslookup"
+# ],
+# "name": "ansible_test_notable",
+# "recommended_actions": [
+# "email",
+# "logevent",
+# "makestreams",
+# "nbtstat"
+# ],
+# "security_domain": "threat",
+# "severity": "high"
+# },
+#   { } # there is no configuration associated with "Ansible Test 2"
+# ]
+
+# Using merged
+# ------------
+
+- name: Example to add config
+ splunk.es.splunk_adaptive_response_notable_events:
+ config:
+ - correlation_search_name: Ansible Test
+ description: test notable event
+ drilldown_earliest_offset: $info_min_time$
+ drilldown_latest_offset: $info_max_time$
+ extract_artifacts:
+ asset:
+ - src
+ - dest
+ identity:
+ - src_user
+ - user
+ - src_user_id
+ next_steps:
+ - makestreams
+ name: ansible_test_notable
+ recommended_actions:
+ - email
+ - logevent
+ security_domain: threat
+ severity: high
+ state: merged
+
+# RUN output:
+# -----------
+
+# "after": [
+# {
+# "correlation_search_name": "Ansible Test",
+# "description": "test notable event",
+# "drilldown_earliest_offset": "$info_min_time$",
+# "drilldown_latest_offset": "$info_max_time$",
+# "drilldown_name": "test_drill_name",
+# "drilldown_search": "test_drill",
+# "extract_artifacts": {
+# "asset": [
+# "src",
+# "dest",
+# "dvc",
+# "orig_host"
+# ],
+# "identity": [
+# "src_user",
+# "user",
+# "src_user_id",
+# "src_user_role",
+# "user_id",
+# "user_role",
+# "vendor_account"
+# ]
+# },
+# "investigation_profiles": [
+# "test profile 1",
+# "test profile 2",
+# "test profile 3"
+# ],
+# "next_steps": [
+# "makestreams",
+# "nbtstat",
+# "nslookup"
+# ],
+# "name": "ansible_test_notable",
+# "recommended_actions": [
+# "email",
+# "logevent",
+# "makestreams",
+# "nbtstat"
+# ],
+# "security_domain": "threat",
+# "severity": "high"
+# }
+# ],
+# "before": [],
+
+# Using replaced
+# --------------
+
+- name: Example to Replace the config
+ splunk.es.splunk_adaptive_response_notable_events:
+ config:
+ - correlation_search_name: Ansible Test
+ description: test notable event
+ drilldown_earliest_offset: $info_min_time$
+ drilldown_latest_offset: $info_max_time$
+ extract_artifacts:
+ asset:
+ - src
+ - dest
+ identity:
+ - src_user
+ - user
+ - src_user_id
+ next_steps:
+ - makestreams
+ name: ansible_test_notable
+ recommended_actions:
+ - email
+ - logevent
+ security_domain: threat
+ severity: high
+ state: replaced
+
+# RUN output:
+# -----------
+
+# "after": [
+# {
+# "correlation_search_name": "Ansible Test",
+# "description": "test notable event",
+# "drilldown_earliest_offset": "$info_min_time$",
+# "drilldown_latest_offset": "$info_max_time$",
+# "extract_artifacts": {
+# "asset": [
+# "src",
+# "dest"
+# ],
+# "identity": [
+# "src_user",
+# "user",
+# "src_user_id"
+# ]
+# },
+# "next_steps": [
+# "makestreams"
+# ],
+# "name": "ansible_test_notable",
+# "recommended_actions": [
+# "email",
+# "logevent"
+# ],
+# "security_domain": "threat",
+# "severity": "high"
+# }
+# ],
+# "before": [
+# {
+# "correlation_search_name": "Ansible Test",
+# "description": "test notable event",
+# "drilldown_earliest_offset": "$info_min_time$",
+# "drilldown_latest_offset": "$info_max_time$",
+# "drilldown_name": "test_drill_name",
+# "drilldown_search": "test_drill",
+# "extract_artifacts": {
+# "asset": [
+# "src",
+# "dest",
+# "dvc",
+# "orig_host"
+# ],
+# "identity": [
+# "src_user",
+# "user",
+# "src_user_id",
+# "src_user_role",
+# "user_id",
+# "user_role",
+# "vendor_account"
+# ]
+# },
+# "investigation_profiles": [
+# "test profile 1",
+# "test profile 2",
+# "test profile 3"
+# ],
+# "next_steps": [
+# "makestreams",
+# "nbtstat",
+# "nslookup"
+# ],
+# "name": "ansible_test_notable",
+# "recommended_actions": [
+# "email",
+# "logevent",
+# "makestreams",
+# "nbtstat"
+# ],
+# "security_domain": "threat",
+# "severity": "high"
+# }
+# ],
+
+# USING DELETED
+# -------------
+
+- name: Example to remove the config
+ splunk.es.splunk_adaptive_response_notable_events:
+ config:
+ - correlation_search_name: Ansible Test
+ state: deleted
+
+# RUN output:
+# -----------
+
+# "after": [],
+# "before": [
+# {
+# "correlation_search_name": "Ansible Test",
+# "description": "test notable event",
+# "drilldown_earliest_offset": "$info_min_time$",
+# "drilldown_latest_offset": "$info_max_time$",
+# "drilldown_name": "test_drill_name",
+# "drilldown_search": "test_drill",
+# "extract_artifacts": {
+# "asset": [
+# "src",
+# "dest",
+# "dvc",
+# "orig_host"
+# ],
+# "identity": [
+# "src_user",
+# "user",
+# "src_user_id",
+# "src_user_role",
+# "user_id",
+# "user_role",
+# "vendor_account"
+# ]
+# },
+# "investigation_profiles": [
+# "test profile 1",
+# "test profile 2",
+# "test profile 3"
+# ],
+# "next_steps": [
+# "makestreams",
+# "nbtstat",
+# "nslookup"
+# ],
+# "name": "ansible_test_notable",
+# "recommended_actions": [
+# "email",
+# "logevent",
+# "makestreams",
+# "nbtstat"
+# ],
+# "security_domain": "threat",
+# "severity": "high"
+# }
+# ]
+"""
+
+RETURN = """
+before:
+ description: The configuration as structured data prior to module invocation.
+ returned: always
+ type: list
+ sample: The configuration returned will always be in the same format of the parameters above.
+after:
+ description: The configuration as structured data after module completion.
+ returned: when changed
+ type: list
+ sample: The configuration returned will always be in the same format of the parameters above.
+gathered:
+ description: Facts about the network resource gathered from the remote device as structured data.
+ returned: when state is I(gathered)
+ type: dict
+ sample: >
+ This output will always be in the same format as the
+ module argspec.
+"""
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py
new file mode 100644
index 000000000..9c865507b
--- /dev/null
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py
@@ -0,0 +1,376 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: correlation_search
+short_description: Manage Splunk Enterprise Security Correlation Searches
+description:
+ - This module allows for creation, deletion, and modification of Splunk Enterprise Security Correlation Searches
+version_added: "1.0.0"
+deprecated:
+ alternative: splunk_correlation_searches
+ why: Newer and updated modules released with more functionality.
+ removed_at_date: '2024-09-01'
+options:
+ name:
+ description:
+      - Name of correlation search
+ required: True
+ type: str
+ description:
+ description:
+      - Description of the correlation search, this will populate the description field for the web console
+ required: True
+ type: str
+ state:
+ description:
+      - Add, remove, enable, or disable a correlation search.
+ required: True
+ choices: [ "present", "absent", "enabled", "disabled" ]
+ type: str
+ search:
+ description:
+ - SPL search string
+ type: str
+ required: True
+ app:
+ description:
+      - Splunk app to associate the correlation search with
+ type: str
+ required: False
+ default: "SplunkEnterpriseSecuritySuite"
+ ui_dispatch_context:
+ description:
+ - Set an app to use for links such as the drill-down search in a notable
+ event or links in an email adaptive response action. If None, uses the
+ Application Context.
+ type: str
+ required: False
+ time_earliest:
+ description:
+ - Earliest time using relative time modifiers.
+ type: str
+ required: False
+ default: "-24h"
+ time_latest:
+ description:
+ - Latest time using relative time modifiers.
+ type: str
+ required: False
+ default: "now"
+ cron_schedule:
+ description:
+ - Enter a cron-style schedule.
+ - For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM).
+ - Real-time searches use a default schedule of C('*/5 * * * *').
+ type: str
+ required: False
+ default: "*/5 * * * *"
+ scheduling:
+ description:
+ - Controls the way the scheduler computes the next execution time of a scheduled search.
+ - >
+ Learn more:
+ https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling
+ type: str
+ required: False
+ default: "real-time"
+ choices:
+ - "real-time"
+ - "continuous"
+ schedule_window:
+ description:
+ - Let report run at any time within a window that opens at its scheduled run time,
+ to improve efficiency when there are many concurrently scheduled reports.
+ The "auto" setting automatically determines the best window width for the report.
+ type: str
+ required: False
+ default: "0"
+ schedule_priority:
+ description:
+ - Raise the scheduling priority of a report. Set to "Higher" to prioritize
+ it above other searches of the same scheduling mode, or "Highest" to
+ prioritize it above other searches regardless of mode. Use with discretion.
+ type: str
+ required: False
+ default: "Default"
+ choices:
+ - "Default"
+ - "Higher"
+ - "Highest"
+ trigger_alert_when:
+ description:
+      - Specify the type of count the alert threshold is evaluated against,
+        for example trigger when the "number of events" returned by the
+        search satisfies the configured condition and value.
+ type: str
+ required: False
+ default: "number of events"
+ choices:
+ - "number of events"
+ - "number of results"
+ - "number of hosts"
+ - "number of sources"
+ trigger_alert_when_condition:
+ description:
+ - Conditional to pass to C(trigger_alert_when)
+ type: str
+ required: False
+ default: "greater than"
+ choices:
+ - "greater than"
+ - "less than"
+ - "equal to"
+ - "not equal to"
+ - "drops by"
+ - "rises by"
+ trigger_alert_when_value:
+ description:
+ - Value to pass to C(trigger_alert_when)
+ type: str
+ required: False
+ default: "10"
+ throttle_window_duration:
+ description:
+ - "How much time to ignore other events that match the field values specified in Fields to group by."
+ type: str
+ required: False
+ throttle_fields_to_group_by:
+ description:
+ - "Type the fields to consider for matching events for throttling."
+ type: str
+ required: False
+ suppress_alerts:
+ description:
+ - "To suppress alerts from this correlation search or not"
+ type: bool
+ required: False
+ default: False
+notes:
+ - >
+ The following options are not yet supported:
+ throttle_window_duration, throttle_fields_to_group_by, and adaptive_response_actions
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
+
+EXAMPLES = """
+- name: Example of creating a correlation search with splunk.es.correlation_search
+  splunk.es.correlation_search:
+    name: "Example Correlation Search From Ansible"
+    description: "Example Correlation Search From Ansible, description."
+ search: 'source="/var/log/snort.log"'
+ state: "present"
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+
+from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
+ utils,
+)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+)
+
+
+def main():
+ argspec = dict(
+ name=dict(required=True, type="str"),
+ description=dict(required=True, type="str"),
+ state=dict(
+ choices=["present", "absent", "enabled", "disabled"], required=True
+ ),
+ search=dict(required=True, type="str"),
+ app=dict(
+ type="str", required=False, default="SplunkEnterpriseSecuritySuite"
+ ),
+ ui_dispatch_context=dict(type="str", required=False),
+ time_earliest=dict(type="str", required=False, default="-24h"),
+ time_latest=dict(type="str", required=False, default="now"),
+ cron_schedule=dict(type="str", required=False, default="*/5 * * * *"),
+ scheduling=dict(
+ type="str",
+ required=False,
+ default="real-time",
+ choices=["real-time", "continuous"],
+ ),
+ schedule_window=dict(type="str", required=False, default="0"),
+ schedule_priority=dict(
+ type="str",
+ required=False,
+ default="Default",
+ choices=["Default", "Higher", "Highest"],
+ ),
+ trigger_alert_when=dict(
+ type="str",
+ required=False,
+ default="number of events",
+ choices=[
+ "number of events",
+ "number of results",
+ "number of hosts",
+ "number of sources",
+ ],
+ ),
+ trigger_alert_when_condition=dict(
+ type="str",
+ required=False,
+ default="greater than",
+ choices=[
+ "greater than",
+ "less than",
+ "equal to",
+ "not equal to",
+ "drops by",
+ "rises by",
+ ],
+ ),
+ trigger_alert_when_value=dict(
+ type="str", required=False, default="10"
+ ),
+ throttle_window_duration=dict(type="str", required=False),
+ throttle_fields_to_group_by=dict(type="str", required=False),
+ suppress_alerts=dict(type="bool", required=False, default=False),
+ )
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ if module.params["state"] in ["present", "enabled"]:
+ module_disabled_state = False
+ else:
+ module_disabled_state = True
+
+ splunk_request = SplunkRequest(
+ module,
+ override=False,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ not_rest_data_keys=["state"],
+ )
+
+ try:
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ except HTTPError as e:
+ # the data monitor doesn't exist
+ query_dict = {}
+
+ # Have to custom craft the data here because they overload the saved searches
+ # endpoint in the rest api and we want to hide the nuance from the user
+ request_post_data = {}
+ request_post_data["name"] = module.params["name"]
+ request_post_data["action.correlationsearch.enabled"] = "1"
+ request_post_data["is_scheduled"] = True
+ request_post_data["dispatch.rt_backfill"] = True
+ request_post_data["action.correlationsearch.label"] = module.params["name"]
+ request_post_data["description"] = module.params["description"]
+ request_post_data["search"] = module.params["search"]
+ request_post_data["request.ui_dispatch_app"] = module.params["app"]
+ if module.params["ui_dispatch_context"]:
+ request_post_data["request.ui_dispatch_context"] = module.params[
+ "ui_dispatch_context"
+ ]
+ request_post_data["dispatch.earliest_time"] = module.params[
+ "time_earliest"
+ ]
+ request_post_data["dispatch.latest_time"] = module.params["time_latest"]
+ request_post_data["cron_schedule"] = module.params["cron_schedule"]
+ if module.params["scheduling"] == "real-time":
+ request_post_data["realtime_schedule"] = True
+ else:
+ request_post_data["realtime_schedule"] = False
+ request_post_data["schedule_window"] = module.params["schedule_window"]
+ request_post_data["schedule_priority"] = module.params[
+ "schedule_priority"
+ ].lower()
+ request_post_data["alert_type"] = module.params["trigger_alert_when"]
+ request_post_data["alert_comparator"] = module.params[
+ "trigger_alert_when_condition"
+ ]
+ request_post_data["alert_threshold"] = module.params[
+ "trigger_alert_when_value"
+ ]
+ request_post_data["alert.suppress"] = module.params["suppress_alerts"]
+ request_post_data["disabled"] = module_disabled_state
+
+ request_post_data = utils.remove_empties(request_post_data)
+
+ if module.params["state"] in ["present", "enabled", "disabled"]:
+ if query_dict:
+ needs_change = False
+ for arg in request_post_data:
+ if arg in query_dict["entry"][0]["content"]:
+ if to_text(
+ query_dict["entry"][0]["content"][arg]
+ ) != to_text(request_post_data[arg]):
+ needs_change = True
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ # FIXME - need to find a reasonable way to deal with action.correlationsearch.enabled
+ del request_post_data[
+ "name"
+            ]  # If this is present, splunk assumes we're trying to create a new one with the same name
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["name"])
+ ),
+ data=urlencode(request_post_data),
+ )
+ module.exit_json(
+ changed=True, msg="{0} updated.", splunk_data=splunk_data
+ )
+ else:
+ # Create it
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches",
+ data=urlencode(request_post_data),
+ )
+ module.exit_json(
+ changed=True, msg="{0} created.", splunk_data=splunk_data
+ )
+
+ elif module.params["state"] == "absent":
+ if query_dict:
+ splunk_data = splunk_request.delete_by_path(
+ "services/saved/searches/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ module.exit_json(
+ changed=True,
+ msg="Deleted {0}.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py
new file mode 100644
index 000000000..0ab756989
--- /dev/null
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py
@@ -0,0 +1,80 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: correlation_search_info
+short_description: Manage Splunk Enterprise Security Correlation Searches
+description:
+ - This module allows for the query of Splunk Enterprise Security Correlation Searches
+version_added: "1.0.0"
+options:
+ name:
+ description:
+      - Name of correlation search
+ required: false
+ type: str
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
+
+EXAMPLES = """
+- name: Example usage of splunk.es.correlation_search_info
+ splunk.es.correlation_search_info:
+ name: "Name of correlation search"
+ register: scorrelation_search_info
+
+- name: debug display information gathered
+ debug:
+ var: scorrelation_search_info
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.six.moves.urllib.parse import quote_plus
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+)
+
+
+def main():
+
+ argspec = dict(name=dict(required=False, type="str"))
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ splunk_request = SplunkRequest(
+ module,
+ headers={"Content-Type": "application/json"},
+ )
+
+ if module.params["name"]:
+ try:
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ except HTTPError as e:
+ # the data monitor doesn't exist
+ query_dict = {}
+ else:
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
+ )
+
+ module.exit_json(changed=False, splunk_correlation_search_info=query_dict)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py
new file mode 100644
index 000000000..ac834d1b9
--- /dev/null
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py
@@ -0,0 +1,630 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright 2022 Red Hat
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: splunk_correlation_searches
+short_description: Splunk Enterprise Security Correlation searches resource module
+description:
+ - This module allows for creation, deletion, and modification of Splunk
+ Enterprise Security correlation searches
+ - Tested against Splunk Enterprise Server v8.2.3 with Splunk Enterprise Security v7.0.1
+ installed on it.
+version_added: "2.1.0"
+options:
+ config:
+ description:
+ - Configure file and directory monitoring on the system
+ type: list
+ elements: dict
+ suboptions:
+ name:
+ description:
+ - Name of correlation search
+ type: str
+ required: True
+ disabled:
+ description:
+ - Disable correlation search
+ type: bool
+ default: False
+ description:
+ description:
+ - Description of the correlation search, this will populate the description field for the web console
+ type: str
+ search:
+ description:
+ - SPL search string
+ type: str
+ app:
+ description:
+ - Splunk app to associate the correlation search with
+ type: str
+ default: "SplunkEnterpriseSecuritySuite"
+ annotations:
+ description:
+ - Add context from industry standard cyber security mappings in Splunk Enterprise Security
+ or custom annotations
+ type: dict
+ suboptions:
+ cis20:
+ description:
+ - Specify CIS20 annotations
+ type: list
+ elements: str
+ kill_chain_phases:
+ description:
+ - Specify Kill Chain phases annotations
+ type: list
+ elements: str
+ mitre_attack:
+ description:
+ - Specify MITRE ATTACK annotations
+ type: list
+ elements: str
+ nist:
+ description:
+ - Specify NIST annotations
+ type: list
+ elements: str
+ custom:
+ description:
+ - Specify custom framework and custom annotations
+ type: list
+ elements: dict
+ suboptions:
+ framework:
+ description:
+ - Specify annotation framework
+ type: str
+ custom_annotations:
+ description:
+ - Specify annotations associated with custom framework
+ type: list
+ elements: str
+ ui_dispatch_context:
+ description:
+ - Set an app to use for links such as the drill-down search in a notable
+ event or links in an email adaptive response action. If None, uses the
+ Application Context.
+ type: str
+ time_earliest:
+ description:
+ - Earliest time using relative time modifiers.
+ type: str
+ default: "-24h"
+ time_latest:
+ description:
+ - Latest time using relative time modifiers.
+ type: str
+ default: "now"
+ cron_schedule:
+ description:
+ - Enter a cron-style schedule.
+ - For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM).
+ - Real-time searches use a default schedule of C('*/5 * * * *').
+ type: str
+ default: "*/5 * * * *"
+ scheduling:
+ description:
+ - Controls the way the scheduler computes the next execution time of a scheduled search.
+ - >
+ Learn more:
+ https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling
+ type: str
+ default: "realtime"
+ choices:
+ - "realtime"
+ - "continuous"
+ schedule_window:
+ description:
+ - Let report run at any time within a window that opens at its scheduled run time,
+ to improve efficiency when there are many concurrently scheduled reports.
+ The "auto" setting automatically determines the best window width for the report.
+ type: str
+ default: "0"
+ schedule_priority:
+ description:
+ - Raise the scheduling priority of a report. Set to "Higher" to prioritize
+ it above other searches of the same scheduling mode, or "Highest" to
+ prioritize it above other searches regardless of mode. Use with discretion.
+ type: str
+ default: "default"
+ choices:
+ - "default"
+ - "higher"
+ - "highest"
+ trigger_alert:
+ description:
+ - Notable response actions and risk response actions are always triggered for each result.
+ Choose whether the trigger is activated once or for each result.
+ type: str
+ default: "once"
+ choices:
+ - "once"
+ - "for each result"
+ trigger_alert_when:
+ description:
+ - Specify the type of count used to evaluate the alert trigger, such as the
+ number of events, results, hosts, or sources. Used together with
+ C(trigger_alert_when_condition) and C(trigger_alert_when_value).
+ type: str
+ default: "number of events"
+ choices:
+ - "number of events"
+ - "number of results"
+ - "number of hosts"
+ - "number of sources"
+ trigger_alert_when_condition:
+ description:
+ - Conditional to pass to C(trigger_alert_when)
+ type: str
+ default: "greater than"
+ choices:
+ - "greater than"
+ - "less than"
+ - "equal to"
+ - "not equal to"
+ - "drops by"
+ - "rises by"
+ trigger_alert_when_value:
+ description:
+ - Value to pass to C(trigger_alert_when)
+ type: str
+ default: "10"
+ throttle_window_duration:
+ description:
+ - How much time to ignore other events that match the field values specified in Fields to group by.
+ type: str
+ throttle_fields_to_group_by:
+ description:
+ - Type the fields to consider for matching events for throttling.
+ type: list
+ elements: str
+ suppress_alerts:
+ description:
+ - To suppress alerts from this correlation search or not
+ type: bool
+ default: False
+ running_config:
+ description:
+ - The module, by default, will connect to the remote device and retrieve the current
+ running-config to use as a base for comparing against the contents of source.
+ There are times when it is not desirable to have the task get the current running-config
+ for every task in a playbook. The I(running_config) argument allows the implementer
+ to pass in the configuration to use as the base config for comparison. This
+ value of this option should be the output received from device by executing
+ command.
+ type: str
+ state:
+ description:
+ - The state the configuration should be left in
+ type: str
+ choices:
+ - merged
+ - replaced
+ - deleted
+ - gathered
+ default: merged
+
+author: Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security>
+"""
+
+EXAMPLES = """
+# Using gathered
+# --------------
+
+- name: Gather correlation searches config
+ splunk.es.splunk_correlation_searches:
+ config:
+ - name: Ansible Test
+ - name: Ansible Test 2
+ state: gathered
+
+# RUN output:
+# -----------
+
+# "gathered": [
+# {
+# "annotations": {
+# "cis20": [
+# "test1"
+# ],
+# "custom": [
+# {
+# "custom_annotations": [
+# "test5"
+# ],
+# "framework": "test_framework"
+# }
+# ],
+# "kill_chain_phases": [
+# "test3"
+# ],
+# "mitre_attack": [
+# "test2"
+# ],
+# "nist": [
+# "test4"
+# ]
+# },
+# "app": "DA-ESS-EndpointProtection",
+# "cron_schedule": "*/5 * * * *",
+# "description": "test description",
+# "disabled": false,
+# "name": "Ansible Test",
+# "schedule_priority": "default",
+# "schedule_window": "0",
+# "scheduling": "realtime",
+# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
+# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
+# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio'
+# 'n.src\" as \"src\" | where \"count\">=6',
+# "suppress_alerts": false,
+# "throttle_fields_to_group_by": [
+# "test_field1"
+# ],
+# "throttle_window_duration": "5s",
+# "time_earliest": "-24h",
+# "time_latest": "now",
+# "trigger_alert": "once",
+# "trigger_alert_when": "number of events",
+# "trigger_alert_when_condition": "greater than",
+# "trigger_alert_when_value": "10",
+# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite"
+# }
+# ]
+
+# Using merged
+# ------------
+
+- name: Merge and create new correlation searches configuration
+ splunk.es.splunk_correlation_searches:
+ config:
+ - name: Ansible Test
+ disabled: false
+ description: test description
+ app: DA-ESS-EndpointProtection
+ annotations:
+ cis20:
+ - test1
+ mitre_attack:
+ - test2
+ kill_chain_phases:
+ - test3
+ nist:
+ - test4
+ custom:
+ - framework: test_framework
+ custom_annotations:
+ - test5
+ ui_dispatch_context: SplunkEnterpriseSecuritySuite
+ time_earliest: -24h
+ time_latest: now
+ cron_schedule: "*/5 * * * *"
+ scheduling: realtime
+ schedule_window: "0"
+ schedule_priority: default
+ trigger_alert: once
+ trigger_alert_when: number of events
+ trigger_alert_when_condition: greater than
+ trigger_alert_when_value: "10"
+ throttle_window_duration: 5s
+ throttle_fields_to_group_by:
+ - test_field1
+ suppress_alerts: False
+ search: >
+ '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
+ 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
+ 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio'
+ 'n.src\" as \"src\" | where \"count\">=6'
+ state: merged
+
+# RUN output:
+# -----------
+
+# "after": [
+# {
+# "annotations": {
+# "cis20": [
+# "test1"
+# ],
+# "custom": [
+# {
+# "custom_annotations": [
+# "test5"
+# ],
+# "framework": "test_framework"
+# }
+# ],
+# "kill_chain_phases": [
+# "test3"
+# ],
+# "mitre_attack": [
+# "test2"
+# ],
+# "nist": [
+# "test4"
+# ]
+# },
+# "app": "DA-ESS-EndpointProtection",
+# "cron_schedule": "*/5 * * * *",
+# "description": "test description",
+# "disabled": false,
+# "name": "Ansible Test",
+# "schedule_priority": "default",
+# "schedule_window": "0",
+# "scheduling": "realtime",
+# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
+# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
+# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio'
+# 'n.src\" as \"src\" | where \"count\">=6',
+# "suppress_alerts": false,
+# "throttle_fields_to_group_by": [
+# "test_field1"
+# ],
+# "throttle_window_duration": "5s",
+# "time_earliest": "-24h",
+# "time_latest": "now",
+# "trigger_alert": "once",
+# "trigger_alert_when": "number of events",
+# "trigger_alert_when_condition": "greater than",
+# "trigger_alert_when_value": "10",
+# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite"
+# },
+# ],
+# "before": [],
+
+# Using replaced
+# --------------
+
+- name: Replace existing correlation searches configuration
+ splunk.es.splunk_correlation_searches:
+ state: replaced
+ config:
+ - name: Ansible Test
+ disabled: false
+ description: test description
+ app: SplunkEnterpriseSecuritySuite
+ annotations:
+ cis20:
+ - test1
+ - test2
+ mitre_attack:
+ - test3
+ - test4
+ kill_chain_phases:
+ - test5
+ - test6
+ nist:
+ - test7
+ - test8
+ custom:
+ - framework: test_framework2
+ custom_annotations:
+ - test9
+ - test10
+ ui_dispatch_context: SplunkEnterpriseSecuritySuite
+ time_earliest: -24h
+ time_latest: now
+ cron_schedule: "*/5 * * * *"
+ scheduling: continuous
+ schedule_window: auto
+ schedule_priority: default
+ trigger_alert: once
+ trigger_alert_when: number of events
+ trigger_alert_when_condition: greater than
+ trigger_alert_when_value: 10
+ throttle_window_duration: 5s
+ throttle_fields_to_group_by:
+ - test_field1
+ - test_field2
+ suppress_alerts: True
+ search: >
+ '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
+ 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
+ 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio'
+ 'n.src\" as \"src\" | where \"count\">=6'
+
+# RUN output:
+# -----------
+
+# "after": [
+# {
+# "annotations": {
+# "cis20": [
+# "test1",
+# "test2"
+# ],
+# "custom": [
+# {
+# "custom_annotations": [
+# "test9",
+# "test10"
+# ],
+# "framework": "test_framework2"
+# }
+# ],
+# "kill_chain_phases": [
+# "test5",
+# "test6"
+# ],
+# "mitre_attack": [
+# "test3",
+# "test4"
+# ],
+# "nist": [
+# "test7",
+# "test8"
+# ]
+# },
+# "app": "SplunkEnterpriseSecuritySuite",
+# "cron_schedule": "*/5 * * * *",
+# "description": "test description",
+# "disabled": false,
+# "name": "Ansible Test",
+# "schedule_priority": "default",
+# "schedule_window": "auto",
+# "scheduling": "continuous",
+# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
+# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
+# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio'
+# 'n.src\" as \"src\" | where \"count\">=6',
+# "suppress_alerts": true,
+# "throttle_fields_to_group_by": [
+# "test_field1",
+# "test_field2"
+# ],
+# "throttle_window_duration": "5s",
+# "time_earliest": "-24h",
+# "time_latest": "now",
+# "trigger_alert": "once",
+# "trigger_alert_when": "number of events",
+# "trigger_alert_when_condition": "greater than",
+# "trigger_alert_when_value": "10",
+# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite"
+# }
+# ],
+# "before": [
+# {
+# "annotations": {
+# "cis20": [
+# "test1"
+# ],
+# "custom": [
+# {
+# "custom_annotations": [
+# "test5"
+# ],
+# "framework": "test_framework"
+# }
+# ],
+# "kill_chain_phases": [
+# "test3"
+# ],
+# "mitre_attack": [
+# "test2"
+# ],
+# "nist": [
+# "test4"
+# ]
+# },
+# "app": "DA-ESS-EndpointProtection",
+# "cron_schedule": "*/5 * * * *",
+# "description": "test description",
+# "disabled": false,
+# "name": "Ansible Test",
+# "schedule_priority": "default",
+# "schedule_window": "0",
+# "scheduling": "realtime",
+# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
+# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
+# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio'
+# 'n.src\" as \"src\" | where \"count\">=6',
+# "suppress_alerts": false,
+# "throttle_fields_to_group_by": [
+# "test_field1"
+# ],
+# "throttle_window_duration": "5s",
+# "time_earliest": "-24h",
+# "time_latest": "now",
+# "trigger_alert": "once",
+# "trigger_alert_when": "number of events",
+# "trigger_alert_when_condition": "greater than",
+# "trigger_alert_when_value": "10",
+# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite"
+# }
+# ]
+
+# Using deleted
+# -------------
+
+- name: Example to delete the correlation search
+ splunk.es.splunk_correlation_searches:
+ config:
+ - name: Ansible Test
+ state: deleted
+
+# RUN output:
+# -----------
+
+# "after": [],
+# "before": [
+# {
+# "annotations": {
+# "cis20": [
+# "test1"
+# ],
+# "custom": [
+# {
+# "custom_annotations": [
+# "test5"
+# ],
+# "framework": "test_framework"
+# }
+# ],
+# "kill_chain_phases": [
+# "test3"
+# ],
+# "mitre_attack": [
+# "test2"
+# ],
+# "nist": [
+# "test4"
+# ]
+# },
+# "app": "DA-ESS-EndpointProtection",
+# "cron_schedule": "*/5 * * * *",
+# "description": "test description",
+# "disabled": false,
+# "name": "Ansible Test",
+# "schedule_priority": "default",
+# "schedule_window": "0",
+# "scheduling": "realtime",
+# "search": '| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
+# 'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
+# 'led_Authentication\" by \"Authentication.app\",\"Authentication.src\" | rename \"Authentication.app\" as \"app\",\"Authenticatio'
+# 'n.src\" as \"src\" | where \"count\">=6',
+# "suppress_alerts": false,
+# "throttle_fields_to_group_by": [
+# "test_field1"
+# ],
+# "throttle_window_duration": "5s",
+# "time_earliest": "-24h",
+# "time_latest": "now",
+# "trigger_alert": "once",
+# "trigger_alert_when": "number of events",
+# "trigger_alert_when_condition": "greater than",
+# "trigger_alert_when_value": "10",
+# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite"
+# },
+# ],
+
+"""
+
+RETURN = """
+before:
+ description: The configuration as structured data prior to module invocation.
+ returned: always
+ type: list
+ sample: The configuration returned will always be in the same format of the parameters above.
+after:
+ description: The configuration as structured data after module completion.
+ returned: when changed
+ type: list
+ sample: The configuration returned will always be in the same format of the parameters above.
+gathered:
+ description: Facts about the network resource gathered from the remote device as structured data.
+ returned: when state is I(gathered)
+ type: dict
+ sample: >
+ This output will always be in the same format as the
+ module argspec.
+"""
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py
new file mode 100644
index 000000000..080d23d3b
--- /dev/null
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py
@@ -0,0 +1,264 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: data_input_monitor
+short_description: Manage Splunk Data Inputs of type Monitor
+description:
+ - This module allows for addition or deletion of File and Directory Monitor Data Inputs in Splunk.
+version_added: "1.0.0"
+deprecated:
+ alternative: splunk_data_inputs_monitor
+ why: Newer and updated modules released with more functionality.
+ removed_at_date: '2024-09-01'
+options:
+ name:
+ description:
+ - The file or directory path to monitor on the system.
+ required: True
+ type: str
+ state:
+ description:
+ - Add or remove a data source.
+ required: True
+ choices:
+ - "present"
+ - "absent"
+ type: str
+ blacklist:
+ description:
+ - Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.
+ required: False
+ type: str
+ check_index:
+ description:
+ - If set to C(True), the index value is checked to ensure that it is the name of a valid index.
+ required: False
+ type: bool
+ default: False
+ check_path:
+ description:
+ - If set to C(True), the name value is checked to ensure that it exists.
+ required: False
+ type: bool
+ crc_salt:
+ description:
+ - A string that modifies the file tracking identity for files in this input.
+ The magic value <SOURCE> invokes special behavior (see admin documentation).
+ required: False
+ type: str
+ disabled:
+ description:
+ - Indicates if input monitoring is disabled.
+ required: False
+ default: False
+ type: bool
+ followTail:
+ description:
+ - If set to C(True), files that are seen for the first time are read from the end.
+ required: False
+ type: bool
+ default: False
+ host:
+ description:
+ - The value to populate in the host field for events from this data input.
+ required: False
+ type: str
+ host_regex:
+ description:
+ - Specify a regular expression for a file path. If the path for a file
+ matches this regular expression, the captured value is used to populate
+ the host field for events from this data input. The regular expression
+ must have one capture group.
+ required: False
+ type: str
+ host_segment:
+ description:
+ - Use the specified slash-separate segment of the filepath as the host field value.
+ required: False
+ type: int
+ ignore_older_than:
+ description:
+ - Specify a time value. If the modification time of a file being monitored
+ falls outside of this rolling time window, the file is no longer being monitored.
+ required: False
+ type: str
+ index:
+ description:
+ - Which index events from this input should be stored in. Defaults to default.
+ required: False
+ type: str
+ recursive:
+ description:
+ - Setting this to False prevents monitoring of any subdirectories encountered within this data input.
+ required: False
+ type: bool
+ default: False
+ rename_source:
+ description:
+ - The value to populate in the source field for events from this data input.
+ The same source should not be used for multiple data inputs.
+ required: False
+ type: str
+ sourcetype:
+ description:
+ - The value to populate in the sourcetype field for incoming events.
+ required: False
+ type: str
+ time_before_close:
+ description:
+ - When Splunk software reaches the end of a file that is being read, the
+ file is kept open for a minimum of the number of seconds specified in
+ this value. After this period has elapsed, the file is checked again for
+ more data.
+ required: False
+ type: int
+ whitelist:
+ description:
+ - Specify a regular expression for a file path. Only file paths that match this regular expression are indexed.
+ required: False
+ type: str
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+
+EXAMPLES = """
+- name: Example adding data input monitor with splunk.es.data_input_monitor
+ splunk.es.data_input_monitor:
+ name: "/var/log/example.log"
+ state: "present"
+ recursive: True
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.parse import quote_plus
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
+ utils,
+)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+)
+
+
+def main():
+
+ argspec = dict(
+ name=dict(required=True, type="str"),
+ state=dict(choices=["present", "absent"], required=True),
+ blacklist=dict(required=False, type="str", default=None),
+ check_index=dict(required=False, type="bool", default=False),
+ check_path=dict(required=False, type="bool", default=None),
+ crc_salt=dict(required=False, type="str", default=None),
+ disabled=dict(required=False, type="bool", default=False),
+ followTail=dict(required=False, type="bool", default=False),
+ host=dict(required=False, type="str", default=None),
+ host_segment=dict(required=False, type="int", default=None),
+ host_regex=dict(required=False, type="str", default=None),
+ ignore_older_than=dict(required=False, type="str", default=None),
+ index=dict(required=False, type="str", default=None),
+ recursive=dict(required=False, type="bool", default=False),
+ rename_source=dict(required=False, type="str", default=None),
+ sourcetype=dict(required=False, type="str", default=None),
+ time_before_close=dict(required=False, type="int", default=None),
+ whitelist=dict(required=False, type="str", default=None),
+ )
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ # map of keys for the splunk REST API that aren't pythonic so we have to
+ # handle the substitutes
+ keymap = {
+ "check_index": "check-index",
+ "check_path": "check-path",
+ "crc_salt": "crc-salt",
+ "ignore_older_than": "ignore-older-than",
+ "rename_source": "rename-source",
+ "time_before_close": "time-before-close",
+ }
+
+ splunk_request = SplunkRequest(
+ module,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ keymap=keymap,
+ not_rest_data_keys=["state"],
+ )
+ # This is where the splunk_* args are processed
+ request_data = splunk_request.get_data()
+
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/search/data/inputs/monitor/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ query_dict = utils.remove_empties(query_dict)
+
+ if module.params["state"] == "present":
+ if query_dict:
+ needs_change = False
+ for arg in request_data:
+ if arg in query_dict["entry"][0]["content"]:
+ if to_text(
+ query_dict["entry"][0]["content"][arg]
+ ) != to_text(request_data[arg]):
+ needs_change = True
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/monitor/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ module.exit_json(
+ changed=True, msg="{0} updated.", splunk_data=splunk_data
+ )
+ else:
+ # Create it
+ _data = splunk_request.get_data()
+ _data["name"] = module.params["name"]
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/monitor",
+ data=_data,
+ )
+ module.exit_json(
+ changed=True, msg="{0} created.", splunk_data=splunk_data
+ )
+
+ if module.params["state"] == "absent":
+ if query_dict:
+ splunk_data = splunk_request.delete_by_path(
+ "servicesNS/nobody/search/data/inputs/monitor/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ module.exit_json(
+ changed=True,
+ msg="Deleted {0}.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py
new file mode 100644
index 000000000..5771eb9cc
--- /dev/null
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py
@@ -0,0 +1,276 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: data_input_network
+short_description: Manage Splunk Data Inputs of type TCP or UDP
+description:
+ - This module allows for addition or deletion of TCP and UDP Data Inputs in Splunk.
+version_added: "1.0.0"
+deprecated:
+ alternative: splunk_data_inputs_network
+ why: Newer and updated modules released with more functionality.
+ removed_at_date: '2024-09-01'
+options:
+ protocol:
+ description:
+ - Choose between tcp or udp
+ required: True
+ choices:
+ - 'tcp'
+ - 'udp'
+ type: str
+ connection_host:
+ description:
+ - Set the host for the remote server that is sending data.
+ - C(ip) sets the host to the IP address of the remote server sending data.
+ - C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data.
+ - C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
+ default: "ip"
+ required: False
+ type: str
+ choices:
+ - "ip"
+ - "dns"
+ - "none"
+ state:
+ description:
+ - Enable, disable, create, or destroy
+ choices:
+ - "present"
+ - "absent"
+ - "enabled"
+ - "disable"
+ required: False
+ default: "present"
+ type: str
+ datatype:
+ description: >
+ Forwarders can transmit three types of data: raw, unparsed, or parsed.
+ C(cooked) data refers to parsed and unparsed formats.
+ choices:
+ - "cooked"
+ - "raw"
+ default: "raw"
+ required: False
+ type: str
+ host:
+ description:
+ - Host from which the indexer gets data.
+ required: False
+ type: str
+ index:
+ description:
+ - default Index to store generated events.
+ type: str
+ name:
+ description:
+ - The input port which receives raw data.
+ required: True
+ type: str
+ queue:
+ description:
+ - Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
+ - Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more
+ information about props.conf and rules for timestamping and linebreaking, refer to props.conf and
+ the online documentation at "Monitor files and directories with inputs.conf"
+ - Set queue to indexQueue to send your data directly into the index.
+ choices:
+ - "parsingQueue"
+ - "indexQueue"
+ type: str
+ required: False
+ default: "parsingQueue"
+ rawTcpDoneTimeout:
+ description:
+ - Specifies in seconds the timeout value for adding a Done-key.
+ - If a connection over the port specified by name remains idle after receiving data for specified
+ number of seconds, it adds a Done-key. This implies the last event is completely received.
+ default: 10
+ type: int
+ required: False
+ restrictToHost:
+ description:
+ - Allows for restricting this input to only accept data from the host specified here.
+ required: False
+ type: str
+ ssl:
+ description:
+ - Enable or disable ssl for the data stream
+ required: False
+ type: bool
+ source:
+ description:
+ - Sets the source key/field for events from this input. Defaults to the input file path.
+ - >
+ Sets the source key initial value. The key is used during parsing/indexing, in particular to set
+ the source field during indexing. It is also the source field used at search time. As a convenience,
+ the chosen string is prepended with 'source::'.
+ - >
+ Note: Overriding the source key is generally not recommended. Typically, the input layer provides a
+ more accurate string to aid in problem analysis and investigation, accurately recording the file from
+ which the data was retrieved. Consider use of source types, tagging, and search wildcards before
+ overriding this value.
+ type: str
+ sourcetype:
+ description:
+ - Set the source type for events from this input.
+ - '"sourcetype=" is automatically prepended to <string>.'
+ - Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).
+ type: str
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+
+EXAMPLES = """
+- name: Example adding data input network with splunk.es.data_input_network
+ splunk.es.data_input_network:
+ name: "8099"
+ protocol: "tcp"
+ state: "present"
+"""
+
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.parse import quote_plus
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+)
+
+
+def main():
+
+ argspec = dict(
+ state=dict(
+ required=False,
+ choices=["present", "absent", "enabled", "disable"],
+ default="present",
+ type="str",
+ ),
+ connection_host=dict(
+ required=False,
+ choices=["ip", "dns", "none"],
+ default="ip",
+ type="str",
+ ),
+ host=dict(required=False, type="str", default=None),
+ index=dict(required=False, type="str", default=None),
+ name=dict(required=True, type="str"),
+ protocol=dict(required=True, type="str", choices=["tcp", "udp"]),
+ queue=dict(
+ required=False,
+ type="str",
+ choices=["parsingQueue", "indexQueue"],
+ default="parsingQueue",
+ ),
+ rawTcpDoneTimeout=dict(required=False, type="int", default=10),
+ restrictToHost=dict(required=False, type="str", default=None),
+ ssl=dict(required=False, type="bool", default=None),
+ source=dict(required=False, type="str", default=None),
+ sourcetype=dict(required=False, type="str", default=None),
+ datatype=dict(
+ required=False, choices=["cooked", "raw"], default="raw"
+ ),
+ )
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ splunk_request = SplunkRequest(
+ module,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ not_rest_data_keys=["state", "datatype", "protocol"],
+ )
+ # This is where the splunk_* args are processed
+ request_data = splunk_request.get_data()
+
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ quote_plus(module.params["name"]),
+ )
+ )
+
+ if module.params["state"] in ["present", "enabled", "disabled"]:
+ _data = splunk_request.get_data()
+ if module.params["state"] in ["present", "enabled"]:
+ _data["disabled"] = False
+ else:
+ _data["disabled"] = True
+ if query_dict:
+ needs_change = False
+ for arg in request_data:
+ if arg in query_dict["entry"][0]["content"]:
+ if to_text(
+ query_dict["entry"][0]["content"][arg]
+ ) != to_text(request_data[arg]):
+ needs_change = True
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ quote_plus(module.params["name"]),
+ ),
+ data=_data,
+ )
+ if module.params["state"] in ["present", "enabled"]:
+ module.exit_json(
+ changed=True, msg="{0} updated.", splunk_data=splunk_data
+ )
+ else:
+ module.exit_json(
+ changed=True, msg="{0} disabled.", splunk_data=splunk_data
+ )
+ else:
+ # Create it
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ ),
+ data=_data,
+ )
+ module.exit_json(
+ changed=True, msg="{0} created.", splunk_data=splunk_data
+ )
+ elif module.params["state"] == "absent":
+ if query_dict:
+ splunk_data = splunk_request.delete_by_path(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ quote_plus(module.params["name"]),
+ )
+ )
+ module.exit_json(
+ changed=True,
+ msg="Deleted {0}.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data={})
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py
new file mode 100644
index 000000000..0f4922f77
--- /dev/null
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py
@@ -0,0 +1,300 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright 2022 Red Hat
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: splunk_data_inputs_monitor
+short_description: Splunk Data Inputs of type Monitor resource module
+description:
+ - Module to add/modify or delete, File and Directory Monitor Data Inputs in Splunk.
+ - Tested against Splunk Enterprise Server 8.2.3
+version_added: "2.1.0"
+options:
+ config:
+ description:
+ - Configure file and directory monitoring on the system
+ type: list
+ elements: dict
+ suboptions:
+ name:
+ description:
+ - The file or directory path to monitor on the system.
+ required: True
+ type: str
+ blacklist:
+ description:
+ - Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.
+ type: str
+ check_index:
+ description:
+ - If set to C(True), the index value is checked to ensure that it is the name of a valid index.
+ - This parameter is not returned back by Splunk while obtaining object information.
+ It is therefore left out while performing idempotency checks
+ type: bool
+ check_path:
+ description:
+ - If set to C(True), the name value is checked to ensure that it exists.
+ - This parameter is not returned back by Splunk while obtaining object information.
+ It is therefore left out while performing idempotency checks
+ type: bool
+ crc_salt:
+ description:
+ - A string that modifies the file tracking identity for files in this input.
+ The magic value <SOURCE> invokes special behavior (see admin documentation).
+ type: str
+ disabled:
+ description:
+ - Indicates if input monitoring is disabled.
+ type: bool
+ default: False
+ follow_tail:
+ description:
+        - If set to C(True), files that are seen for the first time are read from the end.
+ type: bool
+ host:
+ description:
+ - The value to populate in the host field for events from this data input.
+ type: str
+ default: "$decideOnStartup"
+ host_regex:
+ description:
+ - Specify a regular expression for a file path. If the path for a file
+ matches this regular expression, the captured value is used to populate
+ the host field for events from this data input. The regular expression
+ must have one capture group.
+ type: str
+ host_segment:
+ description:
+        - Use the specified slash-separated segment of the filepath as the host field value.
+ type: int
+ ignore_older_than:
+ description:
+ - Specify a time value. If the modification time of a file being monitored
+ falls outside of this rolling time window, the file is no longer being monitored.
+ - This parameter is not returned back by Splunk while obtaining object information.
+ It is therefore left out while performing idempotency checks
+ type: str
+ index:
+ description:
+ - Which index events from this input should be stored in. Defaults to default.
+ type: str
+ default: "default"
+ recursive:
+ description:
+ - Setting this to False prevents monitoring of any subdirectories encountered within this data input.
+ type: bool
+ rename_source:
+ description:
+ - The value to populate in the source field for events from this data input.
+ The same source should not be used for multiple data inputs.
+ - This parameter is not returned back by Splunk while obtaining object information.
+ It is therefore left out while performing idempotency checks
+ type: str
+ sourcetype:
+ description:
+ - The value to populate in the sourcetype field for incoming events.
+ type: str
+ time_before_close:
+ description:
+ - When Splunk software reaches the end of a file that is being read, the
+ file is kept open for a minimum of the number of seconds specified in
+ this value. After this period has elapsed, the file is checked again for
+ more data.
+ - This parameter is not returned back by Splunk while obtaining object information.
+ It is therefore left out while performing idempotency checks
+ type: int
+ whitelist:
+ description:
+ - Specify a regular expression for a file path. Only file paths that match this regular expression are indexed.
+ type: str
+
+ running_config:
+ description:
+ - The module, by default, will connect to the remote device and retrieve the current
+ running-config to use as a base for comparing against the contents of source.
+ There are times when it is not desirable to have the task get the current running-config
+ for every task in a playbook. The I(running_config) argument allows the implementer
+        to pass in the configuration to use as the base config for comparison. The
+        value of this option should be the output received from the device by
+        executing the command.
+ type: str
+ state:
+ description:
+ - The state the configuration should be left in
+ type: str
+ choices:
+ - merged
+ - replaced
+ - deleted
+ - gathered
+ default: merged
+
+author: Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security>
+"""
+
+EXAMPLES = """
+
+# Using gathered
+# --------------
+
+- name: Gather config for specified Data inputs monitors
+ splunk.es.splunk_data_inputs_monitor:
+ config:
+ - name: "/var/log"
+ - name: "/var"
+ state: gathered
+
+# RUN output:
+# -----------
+
+# "gathered": [
+# {
+# "blacklist": "//var/log/[a-z0-9]/gm",
+# "crc_salt": "<SOURCE>",
+# "disabled": false,
+# "host": "$decideOnStartup",
+# "host_regex": "/(test_host)/gm",
+# "host_segment": 3,
+# "index": "default",
+# "name": "/var/log",
+# "recursive": true,
+# "sourcetype": "test_source",
+# "whitelist": "//var/log/[0-9]/gm"
+# }
+# ]
+#
+
+# Using merged
+# ------------
+
+- name: Update Data inputs monitors config
+ splunk.es.splunk_data_inputs_monitor:
+ config:
+ - name: "/var/log"
+ blacklist: "//var/log/[a-z]/gm"
+ check_index: True
+ check_path: True
+ crc_salt: <SOURCE>
+ rename_source: "test"
+ whitelist: "//var/log/[0-9]/gm"
+ state: merged
+
+# RUN output:
+# -----------
+
+# "after": [
+# {
+# "blacklist": "//var/log/[a-z]/gm",
+# "crc_salt": "<SOURCE>",
+# "disabled": false,
+# "host": "$decideOnStartup",
+# "host_regex": "/(test_host)/gm",
+# "host_segment": 3,
+# "index": "default",
+# "name": "/var/log",
+# "recursive": true,
+# "sourcetype": "test_source",
+# "whitelist": "//var/log/[0-9]/gm"
+# }
+# ],
+# "before": [
+# {
+# "blacklist": "//var/log/[a-z0-9]/gm",
+# "crc_salt": "<SOURCE>",
+# "disabled": false,
+# "host": "$decideOnStartup",
+# "host_regex": "/(test_host)/gm",
+# "host_segment": 3,
+# "index": "default",
+# "name": "/var/log",
+# "recursive": true,
+# "sourcetype": "test_source",
+# "whitelist": "//var/log/[0-9]/gm"
+# }
+# ],
+
+# Using replaced
+# --------------
+
+- name: To Replace Data inputs monitors config
+ splunk.es.splunk_data_inputs_monitor:
+ config:
+ - name: "/var/log"
+ blacklist: "//var/log/[a-z0-9]/gm"
+ crc_salt: <SOURCE>
+ index: default
+ state: replaced
+
+# RUN output:
+# -----------
+
+# "after": [
+# {
+# "blacklist": "//var/log/[a-z0-9]/gm",
+# "crc_salt": "<SOURCE>",
+# "disabled": false,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "/var/log"
+# }
+# ],
+# "before": [
+# {
+# "blacklist": "//var/log/[a-z0-9]/gm",
+# "crc_salt": "<SOURCE>",
+# "disabled": false,
+# "host": "$decideOnStartup",
+# "host_regex": "/(test_host)/gm",
+# "host_segment": 3,
+# "index": "default",
+# "name": "/var/log",
+# "recursive": true,
+# "sourcetype": "test_source",
+# "whitelist": "//var/log/[0-9]/gm"
+# }
+# ],
+
+# Using deleted
+# -----------
+- name: To Delete Data input monitor config
+ splunk.es.splunk_data_inputs_monitor:
+ config:
+ - name: "/var/log"
+ state: deleted
+
+# RUN output:
+# -----------
+#
+# "after": [],
+# "before": [
+# {
+# "blacklist": "//var/log/[a-z0-9]/gm",
+# "crc_salt": "<SOURCE>",
+# "disabled": false,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "/var/log"
+# }
+# ],
+
+"""
+
+RETURN = """
+before:
+ description: The configuration as structured data prior to module invocation.
+ returned: always
+ type: list
+ sample: The configuration returned will always be in the same format of the parameters above.
+after:
+ description: The configuration as structured data after module completion.
+ returned: when changed
+ type: list
+ sample: The configuration returned will always be in the same format of the parameters above.
+"""
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py
new file mode 100644
index 000000000..688e806f1
--- /dev/null
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py
@@ -0,0 +1,603 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright 2022 Red Hat
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: splunk_data_inputs_network
+short_description: Manage Splunk Data Inputs of type TCP or UDP resource module
+description:
+ - Module that allows to add/update or delete of TCP and UDP Data Inputs in Splunk.
+version_added: "2.1.0"
+options:
+ config:
+ description:
+ - Manage and preview protocol input data.
+ type: list
+ elements: dict
+ suboptions:
+ name:
+ description:
+ - The input port which receives raw data.
+ required: True
+ type: str
+ protocol:
+ description:
+ - Choose whether to manage TCP or UDP inputs
+ required: True
+ choices:
+ - 'tcp'
+ - 'udp'
+ type: str
+ connection_host:
+ description:
+ - Set the host for the remote server that is sending data.
+ - C(ip) sets the host to the IP address of the remote server sending data.
+ - C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data.
+ - C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
+ type: str
+ choices:
+ - "ip"
+ - "dns"
+ - "none"
+ datatype:
+ description:
+ - C(cooked) lets one access cooked TCP input information and create new containers for managing cooked data.
+ - C(raw) lets one manage raw tcp inputs from forwarders.
+ - C(splunktcptoken) lets one manage receiver access using tokens.
+ - C(ssl) Provides access to the SSL configuration of a Splunk server.
+ This option does not support states I(deleted) and I(replaced).
+ choices:
+ - "cooked"
+ - "raw"
+ - "splunktcptoken"
+ - "ssl"
+ required: False
+ type: str
+ disabled:
+ description:
+ - Indicates whether the input is disabled.
+ type: bool
+ host:
+ description:
+ - Host from which the indexer gets data.
+ type: str
+ index:
+ description:
+        - Default index to store generated events.
+ type: str
+ no_appending_timestamp:
+ description:
+ - If set to true, prevents Splunk software from prepending a timestamp and hostname to incoming events.
+ - Only for UDP data input configuration.
+ type: bool
+ no_priority_stripping:
+ description:
+ - If set to true, Splunk software does not remove the priority field from incoming syslog events.
+ - Only for UDP data input configuration.
+ type: bool
+ queue:
+ description:
+ - Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
+ - Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more
+ information about props.conf and rules for timestamping and linebreaking, refer to props.conf and
+ the online documentation at "Monitor files and directories with inputs.conf"
+ - Set queue to indexQueue to send your data directly into the index.
+ - Only applicable for "/tcp/raw" and "/udp" APIs
+ choices:
+ - "parsingQueue"
+ - "indexQueue"
+ type: str
+ raw_tcp_done_timeout:
+ description:
+ - Specifies in seconds the timeout value for adding a Done-key.
+ - If a connection over the port specified by name remains idle after receiving data for specified
+ number of seconds, it adds a Done-key. This implies the last event is completely received.
+ - Only for TCP raw input configuration.
+ type: int
+ restrict_to_host:
+ description:
+ - Allows for restricting this input to only accept data from the host specified here.
+ type: str
+ ssl:
+ description:
+        - Enable or disable ssl for the data stream
+ type: bool
+ source:
+ description:
+ - Sets the source key/field for events from this input. Defaults to the input file path.
+ - Sets the source key initial value. The key is used during parsing/indexing, in particular to set
+ the source field during indexing. It is also the source field used at search time. As a convenience,
+ the chosen string is prepended with 'source::'.
+        - Note that overriding the source key is generally not recommended. Typically, the input layer provides a
+ more accurate string to aid in problem analysis and investigation, accurately recording the file from
+ which the data was retrieved. Consider use of source types, tagging, and search wildcards before
+ overriding this value.
+ type: str
+ sourcetype:
+ description:
+ - Set the source type for events from this input.
+ - '"sourcetype=" is automatically prepended to <string>.'
+ - Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).
+ type: str
+ token:
+ description:
+ - Token value to use for SplunkTcpToken. If unspecified, a token is generated automatically.
+ type: str
+ password:
+ description:
+ - Server certificate password, if any.
+ - Only for TCP SSL configuration.
+ type: str
+ require_client_cert:
+ description:
+ - Determines whether a client must authenticate.
+ - Only for TCP SSL configuration.
+ type: str
+ root_ca:
+ description:
+ - Certificate authority list (root file).
+ - Only for TCP SSL configuration.
+ type: str
+ server_cert:
+ description:
+ - Full path to the server certificate.
+ - Only for TCP SSL configuration.
+ type: str
+ cipher_suite:
+ description:
+ - Specifies list of acceptable ciphers to use in ssl.
+ - Only obtained for TCP SSL configuration present on device.
+ type: str
+
+ running_config:
+ description:
+ - The module, by default, will connect to the remote device and retrieve the current
+ running-config to use as a base for comparing against the contents of source.
+ There are times when it is not desirable to have the task get the current running-config
+ for every task in a playbook. The I(running_config) argument allows the implementer
+        to pass in the configuration to use as the base config for comparison. The
+        value of this option should be the output received from the device by
+        executing the command.
+ type: str
+ state:
+ description:
+ - The state the configuration should be left in
+ type: str
+ choices:
+ - merged
+ - replaced
+ - deleted
+ - gathered
+ default: merged
+
+author: Ansible Security Automation Team (@pranav-bhatt) <https://github.com/ansible-security>
+"""
+
+EXAMPLES = """
+
+# Using gathered
+# --------------
+
+- name: Gathering information about TCP Cooked Inputs
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: cooked
+ state: gathered
+
+# RUN output:
+# -----------
+
+# "gathered": [
+# {
+# "connection_host": "ip",
+# "disabled": true,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "8101"
+# },
+# {
+# "disabled": false,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "9997"
+# },
+# {
+# "connection_host": "ip",
+# "disabled": true,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "default:8101",
+# "restrict_to_host": "default"
+# }
+# ]
+
+
+- name: Gathering information about TCP Cooked Inputs by Name
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: cooked
+ name: 9997
+ state: gathered
+
+# RUN output:
+# -----------
+
+# "gathered": [
+# {
+# "datatype": "cooked",
+# "disabled": false,
+# "host": "$decideOnStartup",
+# "name": "9997",
+# "protocol": "tcp"
+# }
+# ]
+
+
+- name: Gathering information about TCP Raw Inputs
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: raw
+ state: gathered
+
+# RUN output:
+# -----------
+
+# "gathered": [
+# {
+# "connection_host": "ip",
+# "disabled": false,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "8099",
+# "queue": "parsingQueue",
+# "raw_tcp_done_timeout": 10
+# },
+# {
+# "connection_host": "ip",
+# "disabled": true,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "default:8100",
+# "queue": "parsingQueue",
+# "raw_tcp_done_timeout": 10,
+# "restrict_to_host": "default",
+# "source": "test_source",
+# "sourcetype": "test_source_type"
+# }
+# ]
+
+- name: Gathering information about TCP Raw inputs by Name
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: raw
+ name: 8099
+ state: gathered
+
+# RUN output:
+# -----------
+
+# "gathered": [
+# {
+# "connection_host": "ip",
+# "datatype": "raw",
+# "disabled": false,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "8099",
+# "protocol": "tcp",
+# "queue": "parsingQueue",
+# "raw_tcp_done_timeout": 10
+# }
+# ]
+
+- name: Gathering information about TCP SSL configuration
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: ssl
+ state: gathered
+
+# RUN output:
+# -----------
+
+# "gathered": [
+# {
+# "cipher_suite": <cipher-suites>,
+# "disabled": true,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "test_host"
+# }
+# ]
+
+- name: Gathering information about TCP SplunkTcpTokens
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: splunktcptoken
+ state: gathered
+
+# RUN output:
+# -----------
+
+# "gathered": [
+# {
+# "disabled": false,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "splunktcptoken://test_token1",
+# "token": <token1>
+# },
+# {
+# "disabled": false,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "splunktcptoken://test_token2",
+# "token": <token2>
+# }
+# ]
+
+# Using merged
+# ------------
+
+- name: To add the TCP raw config
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: raw
+ name: 8100
+ connection_host: ip
+ disabled: True
+ raw_tcp_done_timeout: 9
+ restrict_to_host: default
+ queue: parsingQueue
+ source: test_source
+ sourcetype: test_source_type
+ state: merged
+
+# RUN output:
+# -----------
+
+# "after": [
+# {
+# "connection_host": "ip",
+# "datatype": "raw",
+# "disabled": true,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "default:8100",
+# "protocol": "tcp",
+# "queue": "parsingQueue",
+# "raw_tcp_done_timeout": 9,
+# "restrict_to_host": "default",
+# "source": "test_source",
+# "sourcetype": "test_source_type"
+# }
+# ],
+# "before": [
+# {
+# "connection_host": "ip",
+# "datatype": "raw",
+# "disabled": true,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "default:8100",
+# "protocol": "tcp",
+# "queue": "parsingQueue",
+# "raw_tcp_done_timeout": 10,
+# "restrict_to_host": "default",
+# "source": "test_source",
+# "sourcetype": "test_source_type"
+# }
+# ]
+
+- name: To add the TCP cooked config
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: cooked
+ name: 8101
+ connection_host: ip
+ disabled: False
+ restrict_to_host: default
+ state: merged
+
+# RUN output:
+# -----------
+
+# "after": [
+# {
+# "connection_host": "ip",
+# "datatype": "cooked",
+# "disabled": false,
+# "host": "$decideOnStartup",
+# "name": "default:8101",
+# "protocol": "tcp",
+# "restrict_to_host": "default"
+# }
+# ],
+# "before": [
+# {
+# "connection_host": "ip",
+# "datatype": "cooked",
+# "disabled": true,
+# "host": "$decideOnStartup",
+# "name": "default:8101",
+# "protocol": "tcp",
+# "restrict_to_host": "default"
+# }
+# ],
+
+- name: To add the Splunk TCP token
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: splunktcptoken
+ name: test_token
+ state: merged
+
+# RUN output:
+# -----------
+
+# "after": [
+# {
+# "datatype": "splunktcptoken",
+# "name": "splunktcptoken://test_token",
+# "protocol": "tcp",
+# "token": <token>
+# }
+# ],
+# "before": [],
+
+- name: To add the Splunk SSL
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: ssl
+ name: test_host
+ root_ca: {root CA directory}
+      server_cert: {server certificate directory}
+ state: merged
+
+# RUN output:
+# -----------
+
+# "after": [
+# {
+# "cipher_suite": <cipher suite>,
+# "datatype": "ssl",
+# "disabled": true,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "test_host",
+# "protocol": "tcp"
+# }
+# ],
+# "before": []
+
+
+# Using deleted
+# -------------
+
+- name: To Delete TCP Raw
+ splunk.es.splunk_data_inputs_network:
+ config:
+ - protocol: tcp
+ datatype: raw
+ name: default:8100
+ state: deleted
+
+# RUN output:
+# -----------
+
+# "after": [],
+# "before": [
+# {
+# "connection_host": "ip",
+# "datatype": "raw",
+# "disabled": true,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "default:8100",
+# "protocol": "tcp",
+# "queue": "parsingQueue",
+# "raw_tcp_done_timeout": 9,
+# "restrict_to_host": "default",
+# "source": "test_source",
+# "sourcetype": "test_source_type"
+# }
+# ]
+
+# Using replaced
+# --------------
+
+- name: Replace existing data inputs networks configuration
+ register: result
+ splunk.es.splunk_data_inputs_network:
+ state: replaced
+ config:
+ - protocol: tcp
+ datatype: raw
+ name: 8100
+ connection_host: ip
+ disabled: True
+ host: "$decideOnStartup"
+ index: default
+ queue: parsingQueue
+ raw_tcp_done_timeout: 10
+ restrict_to_host: default
+ source: test_source
+ sourcetype: test_source_type
+
+# RUN output:
+# -----------
+
+# "after": [
+# {
+# "connection_host": "ip",
+# "datatype": "raw",
+# "disabled": true,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "default:8100",
+# "protocol": "tcp",
+# "queue": "parsingQueue",
+# "raw_tcp_done_timeout": 9,
+# "restrict_to_host": "default",
+# "source": "test_source",
+# "sourcetype": "test_source_type"
+# }
+# ],
+# "before": [
+# {
+# "connection_host": "ip",
+# "datatype": "raw",
+# "disabled": true,
+# "host": "$decideOnStartup",
+# "index": "default",
+# "name": "default:8100",
+# "protocol": "tcp",
+# "queue": "parsingQueue",
+# "raw_tcp_done_timeout": 10,
+# "restrict_to_host": "default",
+# "source": "test_source",
+# "sourcetype": "test_source_type"
+# }
+# ],
+
+"""
+
+RETURN = """
+before:
+ description: The configuration prior to the module execution.
+ returned: when state is I(merged), I(replaced), I(deleted)
+ type: list
+ sample: >
+ This output will always be in the same format as the
+ module argspec.
+after:
+ description: The resulting configuration after module execution.
+ returned: when changed
+ type: list
+ sample: >
+ This output will always be in the same format as the
+ module argspec.
+gathered:
+ description: Facts about the network resource gathered from the remote device as structured data.
+ returned: when state is I(gathered)
+ type: dict
+ sample: >
+ This output will always be in the same format as the
+ module argspec.
+"""