author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-14 20:03:01 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-14 20:03:01 +0000
commit     a453ac31f3428614cceb99027f8efbdb9258a40b (patch)
tree       f61f87408f32a8511cbd91799f9cececb53e0374 /collections-debian-merged/ansible_collections/splunk/es/plugins/modules
parent     Initial commit. (diff)
download   ansible-upstream.tar.xz / ansible-upstream.zip

Adding upstream version 2.10.7+merged+base+2.10.8+dfsg. (tag: upstream/2.10.7+merged+base+2.10.8+dfsg, branch: upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'collections-debian-merged/ansible_collections/splunk/es/plugins/modules')
-rw-r--r--  collections-debian-merged/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py          417
-rw-r--r--  collections-debian-merged/ansible_collections/splunk/es/plugins/modules/correlation_search.py                       351
-rw-r--r--  collections-debian-merged/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py                   78
-rw-r--r--  collections-debian-merged/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py                       258
-rw-r--r--  collections-debian-merged/ansible_collections/splunk/es/plugins/modules/data_input_network.py                       272
-rw-r--r--  collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py   417
-rw-r--r--  collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py                351
-rw-r--r--  collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py            78
-rw-r--r--  collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py                258
-rw-r--r--  collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py                272
10 files changed, 2752 insertions, 0 deletions
diff --git a/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py
new file mode 100644
index 00000000..631857a7
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py
@@ -0,0 +1,417 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: adaptive_response_notable_event
+short_description: Manage Splunk Enterprise Security Notable Event Adaptive Responses
+description:
+ - This module allows for creation, deletion, and modification of Splunk
+ Enterprise Security Notable Event Adaptive Responses that are associated
+ with a correlation search
+version_added: "1.0.0"
+options:
+ name:
+ description:
+ - Name of notable event
+ required: true
+ type: str
+ correlation_search_name:
+ description:
+ - Name of correlation search to associate this notable event adaptive response with
+ required: true
+ type: str
+ description:
+ description:
+ - Description of the notable event; this populates the description field in the web console.
+ required: true
+ type: str
+ state:
+ description:
+ - Add or remove a data source.
+ required: true
+ choices: [ "present", "absent" ]
+ type: str
+ security_domain:
+ description:
+ - Splunk Security Domain
+ type: str
+ required: False
+ choices:
+ - "access"
+ - "endpoint"
+ - "network"
+ - "threat"
+ - "identity"
+ - "audit"
+ default: "threat"
+ severity:
+ description:
+ - Severity rating
+ type: str
+ required: False
+ choices:
+ - "informational"
+ - "low"
+ - "medium"
+ - "high"
+ - "critical"
+ - "unknown"
+ default: "high"
+ default_owner:
+ description:
+ - Default owner of the notable event; if unset, it falls back to the Splunk system default.
+ type: str
+ required: False
+ default_status:
+ description:
+ - Default status of the notable event; if unset, it falls back to the Splunk system default.
+ type: str
+ required: False
+ choices:
+ - "unassigned"
+ - "new"
+ - "in progress"
+ - "pending"
+ - "resolved"
+ - "closed"
+ drill_down_name:
+ description:
+ - Name for the drill-down search. Supports variable substitution with fields from the matching event.
+ type: str
+ required: False
+ drill_down_search:
+ description:
+ - Drill-down search. Supports variable substitution with fields from the matching event.
+ type: str
+ required: False
+ drill_down_earliest_offset:
+ description:
+ - Set the amount of time before the triggering event to search for related
+ events. For example, 2h. Use \"$info_min_time$\" to set the drill-down time
+ to match the earliest time of the search
+ type: str
+ required: False
+ default: \"$info_min_time$\"
+ drill_down_latest_offset:
+ description:
+ - Set the amount of time after the triggering event to search for related
+ events. For example, 1m. Use \"$info_max_time$\" to set the drill-down
+ time to match the latest time of the search
+ type: str
+ required: False
+ default: \"$info_max_time$\"
+ investigation_profiles:
+ description:
+ - Investigation profile to associate the notable event with.
+ type: str
+ required: False
+ next_steps:
+ description:
+ - List of adaptive responses that should be run next
+ - Describe next steps and response actions that an analyst could take to address this threat.
+ type: list
+ required: False
+ recommended_actions:
+ description:
+ - List of adaptive responses that are recommended to be run next
+ - Identifying Recommended Adaptive Responses will highlight those actions
+ for the analyst when looking at the list of response actions available,
+ making it easier to find them among the longer list of available actions.
+ type: list
+ required: False
+ asset_extraction:
+ description:
+ - List of assets to extract; select any one or more of the available choices.
+ - Defaults to all available choices.
+ type: list
+ choices:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ default:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ required: False
+ identity_extraction:
+ description:
+ - List of identity fields to extract; select any one or more of the available choices.
+ - Defaults to all available choices.
+ type: list
+ choices:
+ - user
+ - src_user
+ default:
+ - user
+ - src_user
+ required: False
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
+
+EXAMPLES = """
+- name: Example of using splunk.es.adaptive_response_notable_event module
+ splunk.es.adaptive_response_notable_event:
+ name: "Example notable event from Ansible"
+ correlation_search_name: "Example Correlation Search From Ansible"
+ description: "Example notable event from Ansible, description."
+ state: "present"
+ next_steps:
+ - ping
+ - nslookup
+ recommended_actions:
+ - script
+ - ansiblesecurityautomation
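+
+# A second, illustrative task (not part of the upstream examples) showing the
+# optional triage and drill-down fields documented above; the search names and
+# field values are placeholders for your own environment.
+- name: Notable event with triage defaults and a drill-down search
+  splunk.es.adaptive_response_notable_event:
+    name: "Suspicious outbound traffic"
+    correlation_search_name: "Example Correlation Search From Ansible"
+    description: "Notable event demonstrating the optional fields."
+    state: "present"
+    security_domain: "network"
+    severity: "medium"
+    default_status: "new"
+    drill_down_name: "View related events"
+    drill_down_search: 'index=main src=$src$'
+    drill_down_earliest_offset: "$info_min_time$"
+    drill_down_latest_offset: "$info_max_time$"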
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+
+from ansible.module_utils.urls import Request
+from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+ parse_splunk_args,
+)
+
+import copy
+import json
+
+
+def main():
+
+ argspec = dict(
+ name=dict(required=True, type="str"),
+ correlation_search_name=dict(required=True, type="str"),
+ description=dict(required=True, type="str"),
+ state=dict(choices=["present", "absent"], required=True),
+ security_domain=dict(
+ choices=["access", "endpoint", "network", "threat", "identity", "audit"],
+ required=False,
+ default="threat",
+ ),
+ severity=dict(
+ choices=["informational", "low", "medium", "high", "critical", "unknown"],
+ required=False,
+ default="high",
+ ),
+ default_owner=dict(required=False, type="str"),
+ default_status=dict(
+ choices=[
+ "unassigned",
+ "new",
+ "in progress",
+ "pending",
+ "resolved",
+ "closed",
+ ],
+ required=False,
+ default="",
+ ),
+ drill_down_name=dict(required=False, type="str"),
+ drill_down_search=dict(required=False, type="str"),
+ drill_down_earliest_offset=dict(
+ required=False, type="str", default="$info_min_time$"
+ ),
+ drill_down_latest_offset=dict(
+ required=False, type="str", default="$info_max_time$"
+ ),
+ investigation_profiles=dict(required=False, type="str"),
+ next_steps=dict(required=False, type="list", default=[]),
+ recommended_actions=dict(required=False, type="list", default=[]),
+ asset_extraction=dict(
+ required=False,
+ type="list",
+ default=["src", "dest", "dvc", "orig_host"],
+ choices=["src", "dest", "dvc", "orig_host"],
+ ),
+ identity_extraction=dict(
+ required=False,
+ type="list",
+ default=["user", "src_user"],
+ choices=["user", "src_user"],
+ ),
+ )
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ splunk_request = SplunkRequest(
+ module,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ not_rest_data_keys=["state"],
+ )
+
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["correlation_search_name"])
+ )
+ )
+
+ # Have to custom craft the data here because they overload the saved searches
+ # endpoint in the rest api and we want to hide the nuance from the user
+ request_post_data = {}
+
+ # FIXME need to figure out how to properly support these, the possible values appear to
+ # be dynamically created based on what the search is indexing
+ # request_post_data['action.notable.param.extract_assets'] = '[\"src\",\"dest\",\"dvc\",\"orig_host\"]'
+ # request_post_data['action.notable.param.extract_identities'] = [\"src_user\",\"user\"]
+ if module.params["next_steps"]:
+ if len(module.params["next_steps"]) == 1:
+ next_steps = "[[action|{0}]]".format(module.params["next_steps"][0])
+ else:
+ next_steps = ""
+ for next_step in module.params["next_steps"]:
+ if next_steps:
+ next_steps += "\n[[action|{0}]]".format(next_step)
+ else:
+ next_steps = "[[action|{0}]]".format(next_step)
+
+ # NOTE: version:1 appears to be hard coded when you create this via the splunk web UI
+ # but I don't know what it is/means because there's no docs on it
+ next_steps_dict = {"version": 1, "data": next_steps}
+ request_post_data["action.notable.param.next_steps"] = json.dumps(
+ next_steps_dict
+ )
+
+ if module.params["recommended_actions"]:
+ if len(module.params["recommended_actions"]) == 1:
+ request_post_data[
+ "action.notable.param.recommended_actions"
+ ] = module.params["recommended_actions"][0]
+ else:
+ request_post_data["action.notable.param.recommended_actions"] = ",".join(
+ module.params["recommended_actions"]
+ )
+
+ request_post_data["action.notable.param.rule_description"] = module.params[
+ "description"
+ ]
+ request_post_data["action.notable.param.rule_title"] = module.params["name"]
+ request_post_data["action.notable.param.security_domain"] = module.params[
+ "security_domain"
+ ]
+ request_post_data["action.notable.param.severity"] = module.params["severity"]
+ request_post_data["action.notable.param.asset_extraction"] = module.params[
+ "asset_extraction"
+ ]
+ request_post_data["action.notable.param.identity_extraction"] = module.params[
+ "identity_extraction"
+ ]
+
+ # NOTE: this field appears to be hard coded when you create this via the splunk web UI
+ # but I don't know what it is/means because there's no docs on it
+ request_post_data["action.notable.param.verbose"] = "0"
+
+ if module.params["default_owner"]:
+ request_post_data["action.notable.param.default_owner"] = module.params[
+ "default_owner"
+ ]
+
+ if module.params["default_status"]:
+ request_post_data["action.notable.param.default_status"] = module.params[
+ "default_status"
+ ]
+
+ if query_dict:
+ request_post_data["search"] = query_dict["entry"][0]["content"]["search"]
+ if "actions" in query_dict["entry"][0]["content"]:
+ if query_dict["entry"][0]["content"]["actions"] == "notable":
+ pass
+ elif (
+ len(query_dict["entry"][0]["content"]["actions"].split(",")) > 0
+ and "notable" not in query_dict["entry"][0]["content"]["actions"]
+ ):
+ request_post_data["actions"] = (
+ query_dict["entry"][0]["content"]["actions"] + ", notable"
+ )
+ else:
+ request_post_data["actions"] = "notable"
+ else:
+ module.fail_json(
+ msg="Unable to find correlation search: {0}", splunk_data=splunk_data
+ )
+
+ if module.params["state"] == "present":
+ needs_change = False
+ for arg in request_post_data:
+ if arg in query_dict["entry"][0]["content"]:
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_post_data[arg]
+ ):
+ needs_change = True
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["correlation_search_name"])
+ ),
+ data=urlencode(request_post_data),
+ )
+ module.exit_json(
+ changed=True,
+ msg="{0} updated.".format(module.params["correlation_search_name"]),
+ splunk_data=splunk_data,
+ )
+
+ if module.params["state"] == "absent":
+ # FIXME - need to figure out how to clear the action.notable.param fields from the api endpoint
+ module.exit_json(
+ changed=True,
+ msg="Deleted {0}.".format(module.params["name"]),
+ splunk_data=query_dict,
+ )
+ # NOTE: the code below is currently unreachable; module.exit_json() above
+ # terminates the module first (see the FIXME about clearing the notable params).
+ needs_change = False
+ for arg in request_post_data:
+ if arg in query_dict["entry"][0]["content"]:
+ needs_change = True
+ del query_dict["entry"][0]["content"][arg]
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["correlation_search_name"])
+ ),
+ data=urlencode(request_post_data),
+ )
+ module.exit_json(
+ changed=True,
+ msg="{0} updated.".format(module.params["correlation_search_name"]),
+ splunk_data=splunk_data,
+ )
+
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/correlation_search.py b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/correlation_search.py
new file mode 100644
index 00000000..1d45c3a3
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/correlation_search.py
@@ -0,0 +1,351 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: correlation_search
+short_description: Manage Splunk Enterprise Security Correlation Searches
+description:
+ - This module allows for creation, deletion, and modification of Splunk Enterprise Security Correlation Searches
+version_added: "1.0.0"
+options:
+ name:
+ description:
+ - Name of correlation search
+ required: True
+ type: str
+ description:
+ description:
+ - Description of the correlation search; this populates the description field in the web console.
+ required: True
+ type: str
+ state:
+ description:
+ - Add, remove, enable, or disable a correlation search.
+ required: True
+ choices: [ "present", "absent", "enabled", "disabled" ]
+ type: str
+ search:
+ description:
+ - SPL search string
+ type: str
+ required: True
+ app:
+ description:
+ - Splunk app to associate the correlation search with
+ type: str
+ required: False
+ default: "SplunkEnterpriseSecuritySuite"
+ ui_dispatch_context:
+ description:
+ - Set an app to use for links such as the drill-down search in a notable
+ event or links in an email adaptive response action. If None, uses the
+ Application Context.
+ type: str
+ required: False
+ time_earliest:
+ description:
+ - Earliest time using relative time modifiers.
+ type: str
+ required: False
+ default: "-24h"
+ time_latest:
+ description:
+ - Latest time using relative time modifiers.
+ type: str
+ required: False
+ default: "now"
+ cron_schedule:
+ description:
+ - Enter a cron-style schedule.
+ - For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM).
+ - Real-time searches use a default schedule of C('*/5 * * * *').
+ type: str
+ required: False
+ default: "*/5 * * * *"
+ scheduling:
+ description:
+ - Controls the way the scheduler computes the next execution time of a scheduled search.
+ - >
+ Learn more:
+ https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling
+ type: str
+ required: False
+ default: "real-time"
+ choices:
+ - "real-time"
+ - "continuous"
+ schedule_window:
+ description:
+ - Let report run at any time within a window that opens at its scheduled run time,
+ to improve efficiency when there are many concurrently scheduled reports.
+ The "auto" setting automatically determines the best window width for the report.
+ type: str
+ required: False
+ default: "0"
+ schedule_priority:
+ description:
+ - Raise the scheduling priority of a report. Set to "Higher" to prioritize
+ it above other searches of the same scheduling mode, or "Highest" to
+ prioritize it above other searches regardless of mode. Use with discretion.
+ type: str
+ required: False
+ default: "Default"
+ choices:
+ - "Default"
+ - "Higher"
+ - "Highest"
+ trigger_alert_when:
+ description:
+ - What to count when evaluating whether the alert should trigger, for example the number of events or the number of hosts returned by the search.
+ type: str
+ required: False
+ default: "number of events"
+ choices:
+ - "number of events"
+ - "number of results"
+ - "number of hosts"
+ - "number of sources"
+ trigger_alert_when_condition:
+ description:
+ - Conditional to pass to C(trigger_alert_when)
+ type: str
+ required: False
+ default: "greater than"
+ choices:
+ - "greater than"
+ - "less than"
+ - "equal to"
+ - "not equal to"
+ - "drops by"
+ - "rises by"
+ trigger_alert_when_value:
+ description:
+ - Value to pass to C(trigger_alert_when)
+ type: str
+ required: False
+ default: "10"
+ throttle_window_duration:
+ description:
+ - "How much time to ignore other events that match the field values specified in Fields to group by."
+ type: str
+ required: False
+ throttle_fields_to_group_by:
+ description:
+ - "Type the fields to consider for matching events for throttling."
+ type: str
+ required: False
+ suppress_alerts:
+ description:
+ - "To suppress alerts from this correlation search or not"
+ type: bool
+ required: False
+ default: False
+notes:
+ - >
+ The following options are not yet supported:
+ throttle_window_duration, throttle_fields_to_group_by, and adaptive_response_actions
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
+
+EXAMPLES = """
+- name: Example of creating a correlation search with splunk.es.correlation_search
+ splunk.es.correlation_search:
+ name: "Example Correlation Search From Ansible"
+ description: "Example Correlation Search From Ansible, description."
+ search: 'source="/var/log/snort.log"'
+ state: "present"
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+
+from ansible.module_utils.urls import Request
+from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+ parse_splunk_args,
+)
+
+import copy
+
+
+def main():
+
+ argspec = dict(
+ name=dict(required=True, type="str"),
+ description=dict(required=True, type="str"),
+ state=dict(choices=["present", "absent", "enabled", "disabled"], required=True),
+ search=dict(required=True, type="str"),
+ app=dict(type="str", required=False, default="SplunkEnterpriseSecuritySuite"),
+ ui_dispatch_context=dict(type="str", required=False),
+ time_earliest=dict(type="str", required=False, default="-24h"),
+ time_latest=dict(type="str", required=False, default="now"),
+ cron_schedule=dict(type="str", required=False, default="*/5 * * * *"),
+ scheduling=dict(
+ type="str",
+ required=False,
+ default="real-time",
+ choices=["real-time", "continuous"],
+ ),
+ schedule_window=dict(type="str", required=False, default="0"),
+ schedule_priority=dict(
+ type="str",
+ required=False,
+ default="Default",
+ choices=["Default", "Higher", "Highest"],
+ ),
+ trigger_alert_when=dict(
+ type="str",
+ required=False,
+ default="number of events",
+ choices=[
+ "number of events",
+ "number of results",
+ "number of hosts",
+ "number of sources",
+ ],
+ ),
+ trigger_alert_when_condition=dict(
+ type="str",
+ required=False,
+ default="greater than",
+ choices=[
+ "greater than",
+ "less than",
+ "equal to",
+ "not equal to",
+ "drops by",
+ "rises by",
+ ],
+ ),
+ trigger_alert_when_value=dict(type="str", required=False, default="10"),
+ throttle_window_duration=dict(type="str", required=False),
+ throttle_fields_to_group_by=dict(type="str", required=False),
+ suppress_alerts=dict(type="bool", required=False, default=False),
+ )
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+ if module.params["state"] in ["present", "enabled"]:
+ module_disabled_state = False
+ else:
+ module_disabled_state = True
+ splunk_request = SplunkRequest(
+ module,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ not_rest_data_keys=["state"],
+ )
+
+ try:
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ except HTTPError as e:
+ # the correlation search doesn't exist
+ query_dict = {}
+
+ # Have to custom craft the data here because they overload the saved searches
+ # endpoint in the rest api and we want to hide the nuance from the user
+ request_post_data = {}
+ request_post_data["name"] = module.params["name"]
+ request_post_data["action.correlationsearch.enabled"] = "1"
+ request_post_data["is_scheduled"] = True
+ request_post_data["dispatch.rt_backfill"] = True
+ request_post_data["action.correlationsearch.label"] = module.params["name"]
+ request_post_data["description"] = module.params["description"]
+ request_post_data["search"] = module.params["search"]
+ request_post_data["request.ui_dispatch_app"] = module.params["app"]
+ if module.params["ui_dispatch_context"]:
+ request_post_data["request.ui_dispatch_context"] = module.params[
+ "ui_dispatch_context"
+ ]
+ request_post_data["dispatch.earliest_time"] = module.params["time_earliest"]
+ request_post_data["dispatch.latest_time"] = module.params["time_latest"]
+ request_post_data["cron_schedule"] = module.params["cron_schedule"]
+ if module.params["scheduling"] == "real-time":
+ request_post_data["realtime_schedule"] = True
+ else:
+ request_post_data["realtime_schedule"] = False
+ request_post_data["schedule_window"] = module.params["schedule_window"]
+ request_post_data["schedule_priority"] = module.params["schedule_priority"].lower()
+ request_post_data["alert_type"] = module.params["trigger_alert_when"]
+ request_post_data["alert_comparator"] = module.params[
+ "trigger_alert_when_condition"
+ ]
+ request_post_data["alert_threshold"] = module.params["trigger_alert_when_value"]
+ request_post_data["alert.suppress"] = module.params["suppress_alerts"]
+ request_post_data["disabled"] = module_disabled_state
+
+ if module.params["state"] in ["present", "enabled", "disabled"]:
+ if query_dict:
+ needs_change = False
+ for arg in request_post_data:
+ if arg in query_dict["entry"][0]["content"]:
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_post_data[arg]
+ ):
+ needs_change = True
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ # FIXME - need to find a reasonable way to deal with action.correlationsearch.enabled
+ del request_post_data[
+ "name"
+ ] # If this is present, splunk assumes we're trying to create a new one with the same name
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["name"])
+ ),
+ data=urlencode(request_post_data),
+ )
+ module.exit_json(
+ changed=True, msg="{0} updated.", splunk_data=splunk_data
+ )
+ else:
+ # Create it
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches",
+ data=urlencode(request_post_data),
+ )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
+
+ elif module.params["state"] == "absent":
+ if query_dict:
+ splunk_data = splunk_request.delete_by_path(
+ "services/saved/searches/{0}".format(quote_plus(module.params["name"]))
+ )
+ module.exit_json(
+ changed=True,
+ msg="Deleted {0}.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py
new file mode 100644
index 00000000..b6aed3c0
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py
@@ -0,0 +1,78 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: correlation_search_info
+short_description: Query Splunk Enterprise Security Correlation Searches
+description:
+ - This module allows for the query of Splunk Enterprise Security Correlation Searches
+version_added: "1.0.0"
+options:
+ name:
+ description:
+ - Name of correlation search
+ required: false
+ type: str
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
+
+EXAMPLES = """
+- name: Example usage of splunk.es.correlation_search_info
+ splunk.es.correlation_search_info:
+ name: "Name of correlation search"
+ register: correlation_search_info
+
+- name: Debug display of the information gathered
+ debug:
+ var: correlation_search_info
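+
+# Illustrative only: when name is omitted the module queries the full
+# saved-searches listing under the SplunkEnterpriseSecuritySuite namespace;
+# the registered variable name below is a placeholder.
+- name: List all correlation searches
+  splunk.es.correlation_search_info:
+  register: all_correlation_searches
+
+- name: Show the gathered saved-search data
+  debug:
+    var: all_correlation_searches.splunk_correlation_search_info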
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+
+from ansible.module_utils.urls import Request
+from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
+
+
+def main():
+
+ argspec = dict(name=dict(required=False, type="str"))
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ splunk_request = SplunkRequest(module, headers={"Content-Type": "application/json"})
+
+ if module.params["name"]:
+ try:
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ except HTTPError as e:
+ # the correlation search doesn't exist
+ query_dict = {}
+ else:
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
+ )
+
+ module.exit_json(changed=False, splunk_correlation_search_info=query_dict)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py
new file mode 100644
index 00000000..abe1bd2e
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py
@@ -0,0 +1,258 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: data_input_monitor
+short_description: Manage Splunk Data Inputs of type Monitor
+description:
+ - This module allows for addition or deletion of File and Directory Monitor Data Inputs in Splunk.
+version_added: "1.0.0"
+options:
+ name:
+ description:
+ - The file or directory path to monitor on the system.
+ required: True
+ type: str
+ state:
+ description:
+ - Add or remove a data source.
+ required: True
+ choices:
+ - "present"
+ - "absent"
+ type: str
+ blacklist:
+ description:
+ - Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.
+ required: False
+ type: str
+ check_index:
+ description:
+ - If set to C(True), the index value is checked to ensure that it is the name of a valid index.
+ required: False
+ type: bool
+ default: False
+ check_path:
+ description:
+ - If set to C(True), the name value is checked to ensure that it exists.
+ required: False
+ type: bool
+ crc_salt:
+ description:
+ - A string that modifies the file tracking identity for files in this input.
+ The magic value <SOURCE> invokes special behavior (see admin documentation).
+ required: False
+ type: str
+ disabled:
+ description:
+ - Indicates if input monitoring is disabled.
+ required: False
+ default: False
+ type: bool
+ followTail:
+ description:
+ - If set to C(True), files that are seen for the first time are read from the end.
+ required: False
+ type: bool
+ default: False
+ host:
+ description:
+ - The value to populate in the host field for events from this data input.
+ required: False
+ type: str
+ host_regex:
+ description:
+ - Specify a regular expression for a file path. If the path for a file
+ matches this regular expression, the captured value is used to populate
+ the host field for events from this data input. The regular expression
+ must have one capture group.
+ required: False
+ type: str
+ host_segment:
+ description:
+ - Use the specified slash-separated segment of the file path as the host field value.
+ required: False
+ type: int
+ ignore_older_than:
+ description:
+ - Specify a time value. If the modification time of a file being monitored
+ falls outside of this rolling time window, the file is no longer being monitored.
+ required: False
+ type: str
+ index:
+ description:
+ - The index in which events from this input should be stored. Defaults to C(default).
+ required: False
+ type: str
+ recursive:
+ description:
+ - Setting this to False prevents monitoring of any subdirectories encountered within this data input.
+ required: False
+ type: bool
+ default: False
+ rename_source:
+ description:
+ - The value to populate in the source field for events from this data input.
+ The same source should not be used for multiple data inputs.
+ required: False
+ type: str
+ sourcetype:
+ description:
+ - The value to populate in the sourcetype field for incoming events.
+ required: False
+ type: str
+ time_before_close:
+ description:
+ - When Splunk software reaches the end of a file that is being read, the
+ file is kept open for a minimum of the number of seconds specified in
+ this value. After this period has elapsed, the file is checked again for
+ more data.
+ required: False
+ type: int
+ whitelist:
+ description:
+ - Specify a regular expression for a file path. Only file paths that match this regular expression are indexed.
+ required: False
+ type: str
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+
+EXAMPLES = """
+- name: Example adding data input monitor with splunk.es.data_input_monitor
+ splunk.es.data_input_monitor:
+ name: "/var/log/example.log"
+ state: "present"
+ recursive: True
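+
+# An additional, illustrative task (not part of the upstream examples) showing
+# the filtering and metadata options documented above; the paths, regex
+# patterns, sourcetype, and index name are placeholders.
+- name: Monitor an application log directory with filters
+  splunk.es.data_input_monitor:
+    name: "/var/log/myapp"
+    state: "present"
+    recursive: True
+    whitelist: '\.log$'
+    blacklist: '\.gz$'
+    sourcetype: "myapp:log"
+    index: "main"
+    ignore_older_than: "7d"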
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+
+from ansible.module_utils.urls import Request
+from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+ parse_splunk_args,
+)
+
+import copy
+
+
+def main():
+
+ argspec = dict(
+ name=dict(required=True, type="str"),
+ state=dict(choices=["present", "absent"], required=True),
+ blacklist=dict(required=False, type="str", default=None),
+ check_index=dict(required=False, type="bool", default=False),
+ check_path=dict(required=False, type="bool", default=None),
+ crc_salt=dict(required=False, type="str", default=None),
+ disabled=dict(required=False, type="bool", default=False),
+ followTail=dict(required=False, type="bool", default=False),
+ host=dict(required=False, type="str", default=None),
+ host_segment=dict(required=False, type="int", default=None),
+ host_regex=dict(required=False, type="str", default=None),
+ ignore_older_than=dict(required=False, type="str", default=None),
+ index=dict(required=False, type="str", default=None),
+ recursive=dict(required=False, type="bool", default=False),
+ rename_source=dict(required=False, type="str", default=None),
+ sourcetype=dict(required=False, type="str", default=None),
+ time_before_close=dict(required=False, type="int", default=None),
+ whitelist=dict(required=False, type="str", default=None),
+ )
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ # map of keys for the splunk REST API that aren't pythonic so we have to
+ # handle the substitutes
+ keymap = {
+ "check_index": "check-index",
+ "check_path": "check-path",
+ "crc_salt": "crc-salt",
+ "ignore_older_than": "ignore-older-than",
+ "rename_source": "rename-source",
+ "time_before_close": "time-before-close",
+ }
+
+ splunk_request = SplunkRequest(
+ module,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ keymap=keymap,
+ not_rest_data_keys=["state"],
+ )
+ # This is where the splunk_* args are processed
+ request_data = splunk_request.get_data()
+
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/search/data/inputs/monitor/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+
+ if module.params["state"] == "present":
+ if query_dict:
+ needs_change = False
+ for arg in request_data:
+ if arg in query_dict["entry"][0]["content"]:
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_data[arg]
+ ):
+ needs_change = True
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/monitor/{0}".format(
+ quote_plus(module.params["name"])
+ ),
+ data=urlencode(request_data),
+ )
+ module.exit_json(
+ changed=True, msg="{0} updated.", splunk_data=splunk_data
+ )
+ else:
+ # Create it
+ _data = splunk_request.get_data()
+ _data["name"] = module.params["name"]
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/monitor", data=urlencode(_data)
+ )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
+
+ if module.params["state"] == "absent":
+ if query_dict:
+ splunk_data = splunk_request.delete_by_path(
+ "servicesNS/nobody/search/data/inputs/monitor/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ module.exit_json(
+ changed=True,
+ msg="Deleted {0}.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/data_input_network.py b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/data_input_network.py
new file mode 100644
index 00000000..fba8feba
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/data_input_network.py
@@ -0,0 +1,272 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: data_input_network
+short_description: Manage Splunk Data Inputs of type TCP or UDP
+description:
+ - This module allows for addition or deletion of TCP and UDP Data Inputs in Splunk.
+version_added: "1.0.0"
+options:
+ protocol:
+ description:
+ - Choose between tcp or udp
+ required: True
+ choices:
+ - 'tcp'
+ - 'udp'
+ type: str
+ connection_host:
+ description:
+ - Set the host for the remote server that is sending data.
+ - C(ip) sets the host to the IP address of the remote server sending data.
+ - C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data.
+ - C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
+ default: "ip"
+ required: False
+ type: str
+ choices:
+ - "ip"
+ - "dns"
+ - "none"
+ state:
+ description:
+ - Enable, disable, create, or destroy
+ choices:
+ - "present"
+ - "absent"
+ - "enabled"
+ - "disable"
+ required: False
+ default: "present"
+ type: str
+ datatype:
+ description: >
+ Forwarders can transmit three types of data: raw, unparsed, or parsed.
+ C(cooked) data refers to parsed and unparsed formats.
+ choices:
+ - "cooked"
+ - "raw"
+ default: "raw"
+ required: False
+ type: str
+ host:
+ description:
+ - Host from which the indexer gets data.
+ required: False
+ type: str
+ index:
+ description:
+ - Default index to store generated events.
+ type: str
+ name:
+ description:
+ - The input port which receives raw data.
+ required: True
+ type: str
+ queue:
+ description:
+ - Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
+ - Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more
+ information about props.conf and rules for timestamping and linebreaking, refer to props.conf and
+ the online documentation at "Monitor files and directories with inputs.conf"
+ - Set queue to indexQueue to send your data directly into the index.
+ choices:
+ - "parsingQueue"
+ - "indexQueue"
+ type: str
+ required: False
+ default: "parsingQueue"
+ rawTcpDoneTimeout:
+ description:
+ - Specifies in seconds the timeout value for adding a Done-key.
+ - If a connection over the port specified by name remains idle after receiving data for specified
+ number of seconds, it adds a Done-key. This implies the last event is completely received.
+ default: 10
+ type: int
+ required: False
+ restrictToHost:
+ description:
+ - Allows for restricting this input to only accept data from the host specified here.
+ required: False
+ type: str
+ ssl:
+ description:
+ - Enable or disable SSL for the data stream
+ required: False
+ type: bool
+ source:
+ description:
+ - Sets the source key/field for events from this input. Defaults to the input file path.
+ - >
+ Sets the source key initial value. The key is used during parsing/indexing, in particular to set
+ the source field during indexing. It is also the source field used at search time. As a convenience,
+ the chosen string is prepended with 'source::'.
+ - >
+ Note: Overriding the source key is generally not recommended. Typically, the input layer provides a
+ more accurate string to aid in problem analysis and investigation, accurately recording the file from
+ which the data was retrieved. Consider use of source types, tagging, and search wildcards before
+ overriding this value.
+ type: str
+ sourcetype:
+ description:
+ - Set the source type for events from this input.
+ - '"sourcetype=" is automatically prepended to <string>.'
+ - Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).
+ type: str
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+
+EXAMPLES = """
+- name: Example adding data input network with splunk.es.data_input_network
+ splunk.es.data_input_network:
+ name: "8099"
+ protocol: "tcp"
+ state: "present"
+"""
+
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+
+from ansible.module_utils.urls import Request
+from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+ parse_splunk_args,
+)
+
+import copy
+
+
+def main():
+
+ argspec = dict(
+ state=dict(
+ required=False,
+ choices=["present", "absent", "enabled", "disable"],
+ default="present",
+ type="str",
+ ),
+ connection_host=dict(
+ required=False, choices=["ip", "dns", "none"], default="ip", type="str"
+ ),
+ host=dict(required=False, type="str", default=None),
+ index=dict(required=False, type="str", default=None),
+ name=dict(required=True, type="str"),
+ protocol=dict(required=True, type="str", choices=["tcp", "udp"]),
+ queue=dict(
+ required=False,
+ type="str",
+ choices=["parsingQueue", "indexQueue"],
+ default="parsingQueue",
+ ),
+ rawTcpDoneTimeout=dict(required=False, type="int", default=10),
+ restrictToHost=dict(required=False, type="str", default=None),
+ ssl=dict(required=False, type="bool", default=None),
+ source=dict(required=False, type="str", default=None),
+ sourcetype=dict(required=False, type="str", default=None),
+ datatype=dict(required=False, choices=["cooked", "raw"], default="raw"),
+ )
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ splunk_request = SplunkRequest(
+ module,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ not_rest_data_keys=["state", "datatype", "protocol"],
+ )
+ # This is where the splunk_* args are processed
+ request_data = splunk_request.get_data()
+
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ quote_plus(module.params["name"]),
+ )
+ )
+
+ if module.params["state"] in ["present", "enabled", "disabled"]:
+ _data = splunk_request.get_data()
+ if module.params["state"] in ["present", "enabled"]:
+ _data["disabled"] = False
+ else:
+ _data["disabled"] = True
+ if query_dict:
+ needs_change = False
+ for arg in request_data:
+ if arg in query_dict["entry"][0]["content"]:
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_data[arg]
+ ):
+ needs_change = True
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ quote_plus(module.params["name"]),
+ ),
+ data=urlencode(_data),
+ )
+ if module.params["state"] in ["present", "enabled"]:
+ module.exit_json(
+ changed=True, msg="{0} updated.", splunk_data=splunk_data
+ )
+ else:
+ module.exit_json(
+ changed=True, msg="{0} disabled.", splunk_data=splunk_data
+ )
+ else:
+ # Create it
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ ),
+ data=urlencode(_data),
+ )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
+ elif module.params["state"] == "absent":
+ if query_dict:
+ splunk_data = splunk_request.delete_by_path(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ quote_plus(module.params["name"]),
+ )
+ )
+ module.exit_json(
+ changed=True,
+ msg="Deleted {0}.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data={})
+
+
+if __name__ == "__main__":
+ main()
diff --git a/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py
new file mode 100644
index 00000000..631857a7
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py
@@ -0,0 +1,417 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: adaptive_response_notable_event
+short_description: Manage Splunk Enterprise Security Notable Event Adaptive Responses
+description:
+ - This module allows for creation, deletion, and modification of Splunk
+ Enterprise Security Notable Event Adaptive Responses that are associated
+ with a correlation search
+version_added: "1.0.0"
+options:
+ name:
+ description:
+ - Name of notable event
+ required: true
+ type: str
+ correlation_search_name:
+ description:
+ - Name of correlation search to associate this notable event adaptive response with
+ required: true
+ type: str
+ description:
+ description:
+ - Description of the notable event; this populates the description field in the web console.
+ required: true
+ type: str
+ state:
+ description:
+ - Add or remove a data source.
+ required: true
+ choices: [ "present", "absent" ]
+ type: str
+ security_domain:
+ description:
+ - Splunk Security Domain
+ type: str
+ required: False
+ choices:
+ - "access"
+ - "endpoint"
+ - "network"
+ - "threat"
+ - "identity"
+ - "audit"
+ default: "threat"
+ severity:
+ description:
+ - Severity rating
+ type: str
+ required: False
+ choices:
+ - "informational"
+ - "low"
+ - "medium"
+ - "high"
+ - "critical"
+ - "unknown"
+ default: "high"
+ default_owner:
+ description:
+ - Default owner of the notable event; if unset, it falls back to the Splunk system default.
+ type: str
+ required: False
+ default_status:
+ description:
+ - Default status of the notable event; if unset, it falls back to the Splunk system default.
+ type: str
+ required: False
+ choices:
+ - "unassigned"
+ - "new"
+ - "in progress"
+ - "pending"
+ - "resolved"
+ - "closed"
+ drill_down_name:
+ description:
+ - Name for the drill-down search. Supports variable substitution with fields from the matching event.
+ type: str
+ required: False
+ drill_down_search:
+ description:
+ - Drill-down search. Supports variable substitution with fields from the matching event.
+ type: str
+ required: False
+ drill_down_earliest_offset:
+ description:
+ - Set the amount of time before the triggering event to search for related
+ events. For example, 2h. Use \"$info_min_time$\" to set the drill-down time
+ to match the earliest time of the search
+ type: str
+ required: False
+ default: \"$info_min_time$\"
+ drill_down_latest_offset:
+ description:
+ - Set the amount of time after the triggering event to search for related
+ events. For example, 1m. Use \"$info_max_time$\" to set the drill-down
+ time to match the latest time of the search
+ type: str
+ required: False
+ default: \"$info_max_time$\"
+ investigation_profiles:
+ description:
+ - Investigation profile to associate the notable event with.
+ type: str
+ required: False
+ next_steps:
+ description:
+ - List of adaptive responses that should be run next
+ - Describe next steps and response actions that an analyst could take to address this threat.
+ type: list
+ required: False
+ recommended_actions:
+ description:
+ - List of adaptive responses that are recommended to be run next
+ - Identifying Recommended Adaptive Responses will highlight those actions
+ for the analyst when looking at the list of response actions available,
+ making it easier to find them among the longer list of available actions.
+ type: list
+ required: False
+ asset_extraction:
+ description:
+ - List of assets to extract; select any one or more of the available choices.
+ - Defaults to all available choices.
+ type: list
+ choices:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ default:
+ - src
+ - dest
+ - dvc
+ - orig_host
+ required: False
+ identity_extraction:
+ description:
+ - List of identity fields to extract; select any one or more of the available choices.
+ - Defaults to all available choices.
+ type: list
+ choices:
+ - user
+ - src_user
+ default:
+ - user
+ - src_user
+ required: False
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
+
+EXAMPLES = """
+- name: Example of using splunk.es.adaptive_response_notable_event module
+ splunk.es.adaptive_response_notable_event:
+ name: "Example notable event from Ansible"
+ correlation_search_name: "Example Correlation Search From Ansible"
+ description: "Example notable event from Ansible, description."
+ state: "present"
+ next_steps:
+ - ping
+ - nslookup
+ recommended_actions:
+ - script
+ - ansiblesecurityautomation
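+
+# Illustrative only: removing the adaptive response again once it is no longer
+# needed; note the module currently cannot clear the action.notable.param
+# fields on the Splunk side (see the FIXME in the code below).
+- name: Remove the notable event adaptive response
+  splunk.es.adaptive_response_notable_event:
+    name: "Example notable event from Ansible"
+    correlation_search_name: "Example Correlation Search From Ansible"
+    description: "Example notable event from Ansible, description."
+    state: "absent"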
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+
+from ansible.module_utils.urls import Request
+from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+ parse_splunk_args,
+)
+
+import copy
+import json
+
+
+def main():
+
+ argspec = dict(
+ name=dict(required=True, type="str"),
+ correlation_search_name=dict(required=True, type="str"),
+ description=dict(required=True, type="str"),
+ state=dict(choices=["present", "absent"], required=True),
+ security_domain=dict(
+ choices=["access", "endpoint", "network", "threat", "identity", "audit"],
+ required=False,
+ default="threat",
+ ),
+ severity=dict(
+ choices=["informational", "low", "medium", "high", "critical", "unknown"],
+ required=False,
+ default="high",
+ ),
+ default_owner=dict(required=False, type="str"),
+ default_status=dict(
+ choices=[
+ "unassigned",
+ "new",
+ "in progress",
+ "pending",
+ "resolved",
+ "closed",
+ ],
+ required=False,
+ default="",
+ ),
+ drill_down_name=dict(required=False, type="str"),
+ drill_down_search=dict(required=False, type="str"),
+ drill_down_earliest_offset=dict(
+ required=False, type="str", default="$info_min_time$"
+ ),
+ drill_down_latest_offset=dict(
+ required=False, type="str", default="$info_max_time$"
+ ),
+ investigation_profiles=dict(required=False, type="str"),
+ next_steps=dict(required=False, type="list", default=[]),
+ recommended_actions=dict(required=False, type="list", default=[]),
+ asset_extraction=dict(
+ required=False,
+ type="list",
+ default=["src", "dest", "dvc", "orig_host"],
+ choices=["src", "dest", "dvc", "orig_host"],
+ ),
+ identity_extraction=dict(
+ required=False,
+ type="list",
+ default=["user", "src_user"],
+ choices=["user", "src_user"],
+ ),
+ )
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ splunk_request = SplunkRequest(
+ module,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ not_rest_data_keys=["state"],
+ )
+
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["correlation_search_name"])
+ )
+ )
+
+ # Have to custom craft the data here because they overload the saved searches
+ # endpoint in the rest api and we want to hide the nuance from the user
+ request_post_data = {}
+
+ # FIXME need to figure out how to properly support these, the possible values appear to
+ # be dynamically created based on what the search is indexing
+ # request_post_data['action.notable.param.extract_assets'] = '[\"src\",\"dest\",\"dvc\",\"orig_host\"]'
+ # request_post_data['action.notable.param.extract_identities'] = [\"src_user\",\"user\"]
+ if module.params["next_steps"]:
+ if len(module.params["next_steps"]) == 1:
+ next_steps = "[[action|{0}]]".format(module.params["next_steps"][0])
+ else:
+ next_steps = ""
+ for next_step in module.params["next_steps"]:
+ if next_steps:
+ next_steps += "\n[[action|{0}]]".format(next_step)
+ else:
+ next_steps = "[[action|{0}]]".format(next_step)
+
+ # NOTE: version:1 appears to be hard coded when you create this via the splunk web UI
+ # but I don't know what it is/means because there's no docs on it
+ next_steps_dict = {"version": 1, "data": next_steps}
+ request_post_data["action.notable.param.next_steps"] = json.dumps(
+ next_steps_dict
+ )
+
+ if module.params["recommended_actions"]:
+ if len(module.params["recommended_actions"]) == 1:
+ request_post_data[
+ "action.notable.param.recommended_actions"
+ ] = module.params["recommended_actions"][0]
+ else:
+ request_post_data["action.notable.param.recommended_actions"] = ",".join(
+ module.params["recommended_actions"]
+ )
+
+ request_post_data["action.notable.param.rule_description"] = module.params[
+ "description"
+ ]
+ request_post_data["action.notable.param.rule_title"] = module.params["name"]
+ request_post_data["action.notable.param.security_domain"] = module.params[
+ "security_domain"
+ ]
+ request_post_data["action.notable.param.severity"] = module.params["severity"]
+ request_post_data["action.notable.param.asset_extraction"] = module.params[
+ "asset_extraction"
+ ]
+ request_post_data["action.notable.param.identity_extraction"] = module.params[
+ "identity_extraction"
+ ]
+
+ # NOTE: this field appears to be hard coded when you create this via the splunk web UI
+ # but I don't know what it is/means because there's no docs on it
+ request_post_data["action.notable.param.verbose"] = "0"
+
+ if module.params["default_owner"]:
+ request_post_data["action.notable.param.default_owner"] = module.params[
+ "default_owner"
+ ]
+
+ if module.params["default_status"]:
+ request_post_data["action.notable.param.default_status"] = module.params[
+ "default_status"
+ ]
+
+ if query_dict:
+ request_post_data["search"] = query_dict["entry"][0]["content"]["search"]
+ if "actions" in query_dict["entry"][0]["content"]:
+ if query_dict["entry"][0]["content"]["actions"] == "notable":
+ pass
+ elif (
+ len(query_dict["entry"][0]["content"]["actions"].split(",")) > 0
+ and "notable" not in query_dict["entry"][0]["content"]["actions"]
+ ):
+ request_post_data["actions"] = (
+ query_dict["entry"][0]["content"]["actions"] + ", notable"
+ )
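+ # e.g. an existing actions value of "email" becomes "email, notable", so the
+ # notable action is appended without dropping other configured actions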
+ else:
+ request_post_data["actions"] = "notable"
+ else:
+ module.fail_json(
+ msg="Unable to find correlation search: {0}".format(
+ module.params["correlation_search_name"]
+ ),
+ splunk_data=query_dict,
+ )
+
+ if module.params["state"] == "present":
+ needs_change = False
+ for arg in request_post_data:
+ if arg in query_dict["entry"][0]["content"]:
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_post_data[arg]
+ ):
+ needs_change = True
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["correlation_search_name"])
+ ),
+ data=urlencode(request_post_data),
+ )
+ module.exit_json(
+ changed=True,
+ msg="{0} updated.".format(module.params["correlation_search_name"]),
+ splunk_data=splunk_data,
+ )
+
+ if module.params["state"] == "absent":
+ # FIXME - need to figure out how to clear the action.notable.param fields from the api endpoint
+ module.exit_json(
+ changed=True,
+ msg="Deleted {0}.".format(module.params["name"]),
+ splunk_data=query_dict,
+ )
+ # NOTE: everything below this early exit is currently unreachable; it is left
+ # as a starting point for a real implementation of the FIXME above.
+ needs_change = False
+ for arg in request_post_data:
+ if arg in query_dict["entry"][0]["content"]:
+ needs_change = True
+ del query_dict["entry"][0]["content"][arg]
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["correlation_search_name"])
+ ),
+ data=urlencode(request_post_data),
+ )
+ module.exit_json(
+ changed=True,
+ msg="{0} updated.".format(module.params["correlation_search_name"]),
+ splunk_data=splunk_data,
+ )
+
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py
new file mode 100644
index 00000000..1d45c3a3
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py
@@ -0,0 +1,351 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: correlation_search
+short_description: Manage Splunk Enterprise Security Correlation Searches
+description:
+ - This module allows for creation, deletion, and modification of Splunk Enterprise Security Correlation Searches
+version_added: "1.0.0"
+options:
+ name:
+ description:
+ - Name of correlation search
+ required: True
+ type: str
+ description:
+ description:
+ - Description of the correlation search; this will populate the description field for the web console
+ required: True
+ type: str
+ state:
+ description:
+ - Add, remove, enable, or disable a correlation search.
+ required: True
+ choices: [ "present", "absent", "enabled", "disabled" ]
+ type: str
+ search:
+ description:
+ - SPL search string
+ type: str
+ required: True
+ app:
+ description:
+ - Splunk app to associate the correlation search with
+ type: str
+ required: False
+ default: "SplunkEnterpriseSecuritySuite"
+ ui_dispatch_context:
+ description:
+ - Set an app to use for links such as the drill-down search in a notable
+ event or links in an email adaptive response action. If None, uses the
+ Application Context.
+ type: str
+ required: False
+ time_earliest:
+ description:
+ - Earliest time using relative time modifiers.
+ type: str
+ required: False
+ default: "-24h"
+ time_latest:
+ description:
+ - Latest time using relative time modifiers.
+ type: str
+ required: False
+ default: "now"
+ cron_schedule:
+ description:
+ - Enter a cron-style schedule.
+ - For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM).
+ - Real-time searches use a default schedule of C('*/5 * * * *').
+ type: str
+ required: False
+ default: "*/5 * * * *"
+ scheduling:
+ description:
+ - Controls the way the scheduler computes the next execution time of a scheduled search.
+ - >
+ Learn more:
+ https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling
+ type: str
+ required: False
+ default: "real-time"
+ choices:
+ - "real-time"
+ - "continuous"
+ schedule_window:
+ description:
+ - Let report run at any time within a window that opens at its scheduled run time,
+ to improve efficiency when there are many concurrently scheduled reports.
+ The "auto" setting automatically determines the best window width for the report.
+ type: str
+ required: False
+ default: "0"
+ schedule_priority:
+ description:
+ - Raise the scheduling priority of a report. Set to "Higher" to prioritize
+ it above other searches of the same scheduling mode, or "Highest" to
+ prioritize it above other searches regardless of mode. Use with discretion.
+ type: str
+ required: False
+ default: "Default"
+ choices:
+ - "Default"
+ - "Higher"
+ - "Highest"
+ trigger_alert_when:
+ description:
+ - Specify what the alert threshold is measured against, for example the
+ number of events, results, hosts, or sources returned by the search.
+ type: str
+ required: False
+ default: "number of events"
+ choices:
+ - "number of events"
+ - "number of results"
+ - "number of hosts"
+ - "number of sources"
+ trigger_alert_when_condition:
+ description:
+ - Conditional to pass to C(trigger_alert_when)
+ type: str
+ required: False
+ default: "greater than"
+ choices:
+ - "greater than"
+ - "less than"
+ - "equal to"
+ - "not equal to"
+ - "drops by"
+ - "rises by"
+ trigger_alert_when_value:
+ description:
+ - Value to pass to C(trigger_alert_when)
+ type: str
+ required: False
+ default: "10"
+ throttle_window_duration:
+ description:
+ - "How much time to ignore other events that match the field values specified in Fields to group by."
+ type: str
+ required: False
+ throttle_fields_to_group_by:
+ description:
+ - "Type the fields to consider for matching events for throttling."
+ type: str
+ required: False
+ suppress_alerts:
+ description:
+ - "To suppress alerts from this correlation search or not"
+ type: bool
+ required: False
+ default: False
+notes:
+ - >
+ The following options are not yet supported:
+ throttle_window_duration, throttle_fields_to_group_by, and adaptive_response_actions
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
+
+EXAMPLES = """
+- name: Example of creating a correlation search with splunk.es.correlation_search
+ splunk.es.correlation_search:
+ name: "Example Correlation Search From Ansible"
+ description: "Example Correlation Search From Ansible, description."
+ search: 'source="/var/log/snort.log"'
+ state: "present"
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+
+from ansible.module_utils.urls import Request
+from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+ parse_splunk_args,
+)
+
+import copy
+
+
+def main():
+
+ argspec = dict(
+ name=dict(required=True, type="str"),
+ description=dict(required=True, type="str"),
+ state=dict(choices=["present", "absent", "enabled", "disabled"], required=True),
+ search=dict(required=True, type="str"),
+ app=dict(type="str", required=False, default="SplunkEnterpriseSecuritySuite"),
+ ui_dispatch_context=dict(type="str", required=False),
+ time_earliest=dict(type="str", required=False, default="-24h"),
+ time_latest=dict(type="str", required=False, default="now"),
+ cron_schedule=dict(type="str", required=False, default="*/5 * * * *"),
+ scheduling=dict(
+ type="str",
+ required=False,
+ default="real-time",
+ choices=["real-time", "continuous"],
+ ),
+ schedule_window=dict(type="str", required=False, default="0"),
+ schedule_priority=dict(
+ type="str",
+ required=False,
+ default="Default",
+ choices=["Default", "Higher", "Highest"],
+ ),
+ trigger_alert_when=dict(
+ type="str",
+ required=False,
+ default="number of events",
+ choices=[
+ "number of events",
+ "number of results",
+ "number of hosts",
+ "number of sources",
+ ],
+ ),
+ trigger_alert_when_condition=dict(
+ type="str",
+ required=False,
+ default="greater than",
+ choices=[
+ "greater than",
+ "less than",
+ "equal to",
+ "not equal to",
+ "drops by",
+ "rises by",
+ ],
+ ),
+ trigger_alert_when_value=dict(type="str", required=False, default="10"),
+ throttle_window_duration=dict(type="str", required=False),
+ throttle_fields_to_group_by=dict(type="str", required=False),
+ suppress_alerts=dict(type="bool", required=False, default=False),
+ )
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+ if module.params["state"] in ["present", "enabled"]:
+ module_disabled_state = False
+ else:
+ module_disabled_state = True
+ splunk_request = SplunkRequest(
+ module,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ not_rest_data_keys=["state"],
+ )
+
+ try:
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ except HTTPError as e:
+ # the correlation search doesn't exist
+ query_dict = {}
+
+ # Have to custom craft the data here because they overload the saved searches
+ # endpoint in the rest api and we want to hide the nuance from the user
+ request_post_data = {}
+ request_post_data["name"] = module.params["name"]
+ request_post_data["action.correlationsearch.enabled"] = "1"
+ request_post_data["is_scheduled"] = True
+ request_post_data["dispatch.rt_backfill"] = True
+ request_post_data["action.correlationsearch.label"] = module.params["name"]
+ request_post_data["description"] = module.params["description"]
+ request_post_data["search"] = module.params["search"]
+ request_post_data["request.ui_dispatch_app"] = module.params["app"]
+ if module.params["ui_dispatch_context"]:
+ request_post_data["request.ui_dispatch_context"] = module.params[
+ "ui_dispatch_context"
+ ]
+ request_post_data["dispatch.earliest_time"] = module.params["time_earliest"]
+ request_post_data["dispatch.latest_time"] = module.params["time_latest"]
+ request_post_data["cron_schedule"] = module.params["cron_schedule"]
+ if module.params["scheduling"] == "real-time":
+ request_post_data["realtime_schedule"] = True
+ else:
+ request_post_data["realtime_schedule"] = False
+ request_post_data["schedule_window"] = module.params["schedule_window"]
+ request_post_data["schedule_priority"] = module.params["schedule_priority"].lower()
+ request_post_data["alert_type"] = module.params["trigger_alert_when"]
+ request_post_data["alert_comparator"] = module.params[
+ "trigger_alert_when_condition"
+ ]
+ request_post_data["alert_threshold"] = module.params["trigger_alert_when_value"]
+ request_post_data["alert.suppress"] = module.params["suppress_alerts"]
+ request_post_data["disabled"] = module_disabled_state
+
+ if module.params["state"] in ["present", "enabled", "disabled"]:
+ if query_dict:
+ needs_change = False
+ for arg in request_post_data:
+ if arg in query_dict["entry"][0]["content"]:
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_post_data[arg]
+ ):
+ needs_change = True
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ # FIXME - need to find a reasonable way to deal with action.correlationsearch.enabled
+ del request_post_data[
+ "name"
+ ] # If this is present, splunk assumes we're trying to create a new one with the same name
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["name"])
+ ),
+ data=urlencode(request_post_data),
+ )
+ module.exit_json(
+ changed=True,
+ msg="{0} updated.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+ else:
+ # Create it
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches",
+ data=urlencode(request_post_data),
+ )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
+
+ elif module.params["state"] == "absent":
+ if query_dict:
+ splunk_data = splunk_request.delete_by_path(
+ "services/saved/searches/{0}".format(quote_plus(module.params["name"]))
+ )
+ module.exit_json(
+ changed=True,
+ msg="Deleted {0}.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py
new file mode 100644
index 00000000..b6aed3c0
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py
@@ -0,0 +1,78 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: correlation_search_info
+short_description: Manage Splunk Enterprise Security Correlation Searches
+description:
+ - This module allows for the query of Splunk Enterprise Security Correlation Searches
+version_added: "1.0.0"
+options:
+ name:
+ description:
+ - Name of correlation search
+ required: false
+ type: str
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
+
+EXAMPLES = """
+- name: Example usage of splunk.es.correlation_search_info
+ splunk.es.correlation_search_info:
+ name: "Name of correlation search"
+ register: correlation_search_info
+
+- name: debug display information gathered
+ debug:
+ var: correlation_search_info
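+
+# When name is omitted, every saved search in the SplunkEnterpriseSecuritySuite
+# namespace is returned (illustrative usage):
+- name: Gather information on all correlation searches
+ splunk.es.correlation_search_info:
+ register: all_correlation_searches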
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+
+from ansible.module_utils.urls import Request
+from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
+
+
+def main():
+
+ argspec = dict(name=dict(required=False, type="str"))
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ splunk_request = SplunkRequest(module, headers={"Content-Type": "application/json"})
+
+ if module.params["name"]:
+ try:
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ except HTTPError as e:
+ # the correlation search doesn't exist
+ query_dict = {}
+ else:
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
+ )
+
+ module.exit_json(changed=False, splunk_correlation_search_info=query_dict)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py
new file mode 100644
index 00000000..abe1bd2e
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py
@@ -0,0 +1,258 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: data_input_monitor
+short_description: Manage Splunk Data Inputs of type Monitor
+description:
+ - This module allows for addition or deletion of File and Directory Monitor Data Inputs in Splunk.
+version_added: "1.0.0"
+options:
+ name:
+ description:
+ - The file or directory path to monitor on the system.
+ required: True
+ type: str
+ state:
+ description:
+ - Add or remove a data source.
+ required: True
+ choices:
+ - "present"
+ - "absent"
+ type: str
+ blacklist:
+ description:
+ - Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.
+ required: False
+ type: str
+ check_index:
+ description:
+ - If set to C(True), the index value is checked to ensure that it is the name of a valid index.
+ required: False
+ type: bool
+ default: False
+ check_path:
+ description:
+ - If set to C(True), the name value is checked to ensure that it exists.
+ required: False
+ type: bool
+ crc_salt:
+ description:
+ - A string that modifies the file tracking identity for files in this input.
+ The magic value <SOURCE> invokes special behavior (see admin documentation).
+ required: False
+ type: str
+ disabled:
+ description:
+ - Indicates if input monitoring is disabled.
+ required: False
+ default: False
+ type: bool
+ followTail:
+ description:
+ - If set to C(True), files that are seen for the first time are read from the end.
+ required: False
+ type: bool
+ default: False
+ host:
+ description:
+ - The value to populate in the host field for events from this data input.
+ required: False
+ type: str
+ host_regex:
+ description:
+ - Specify a regular expression for a file path. If the path for a file
+ matches this regular expression, the captured value is used to populate
+ the host field for events from this data input. The regular expression
+ must have one capture group.
+ required: False
+ type: str
+ host_segment:
+ description:
+ - Use the specified slash-separated segment of the filepath as the host field value.
+ required: False
+ type: int
+ ignore_older_than:
+ description:
+ - Specify a time value. If the modification time of a file being monitored
+ falls outside of this rolling time window, the file is no longer being monitored.
+ required: False
+ type: str
+ index:
+ description:
+ - The index in which events from this input should be stored. Defaults to the C(default) index.
+ required: False
+ type: str
+ recursive:
+ description:
+ - Setting this to False prevents monitoring of any subdirectories encountered within this data input.
+ required: False
+ type: bool
+ default: False
+ rename_source:
+ description:
+ - The value to populate in the source field for events from this data input.
+ The same source should not be used for multiple data inputs.
+ required: False
+ type: str
+ sourcetype:
+ description:
+ - The value to populate in the sourcetype field for incoming events.
+ required: False
+ type: str
+ time_before_close:
+ description:
+ - When Splunk software reaches the end of a file that is being read, the
+ file is kept open for a minimum of the number of seconds specified in
+ this value. After this period has elapsed, the file is checked again for
+ more data.
+ required: False
+ type: int
+ whitelist:
+ description:
+ - Specify a regular expression for a file path. Only file paths that match this regular expression are indexed.
+ required: False
+ type: str
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+
+EXAMPLES = """
+- name: Example adding data input monitor with splunk.es.data_input_monitor
+ splunk.es.data_input_monitor:
+ name: "/var/log/example.log"
+ state: "present"
+ recursive: True
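+
+# A fuller example (illustrative; the path, patterns, sourcetype, and index
+# shown here are assumptions):
+- name: Monitor a directory but only index .log files
+ splunk.es.data_input_monitor:
+ name: "/var/log/nginx"
+ state: "present"
+ recursive: True
+ whitelist: '\.log$'
+ blacklist: '\.gz$'
+ sourcetype: "nginx_logs"
+ index: "web"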
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+
+from ansible.module_utils.urls import Request
+from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+ parse_splunk_args,
+)
+
+import copy
+
+
+def main():
+
+ argspec = dict(
+ name=dict(required=True, type="str"),
+ state=dict(choices=["present", "absent"], required=True),
+ blacklist=dict(required=False, type="str", default=None),
+ check_index=dict(required=False, type="bool", default=False),
+ check_path=dict(required=False, type="bool", default=None),
+ crc_salt=dict(required=False, type="str", default=None),
+ disabled=dict(required=False, type="bool", default=False),
+ followTail=dict(required=False, type="bool", default=False),
+ host=dict(required=False, type="str", default=None),
+ host_segment=dict(required=False, type="int", default=None),
+ host_regex=dict(required=False, type="str", default=None),
+ ignore_older_than=dict(required=False, type="str", default=None),
+ index=dict(required=False, type="str", default=None),
+ recursive=dict(required=False, type="bool", default=False),
+ rename_source=dict(required=False, type="str", default=None),
+ sourcetype=dict(required=False, type="str", default=None),
+ time_before_close=dict(required=False, type="int", default=None),
+ whitelist=dict(required=False, type="str", default=None),
+ )
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ # map of keys for the splunk REST API that aren't pythonic so we have to
+ # handle the substitutes
+ keymap = {
+ "check_index": "check-index",
+ "check_path": "check-path",
+ "crc_salt": "crc-salt",
+ "ignore_older_than": "ignore-older-than",
+ "rename_source": "rename-source",
+ "time_before_close": "time-before-close",
+ }
+
+ splunk_request = SplunkRequest(
+ module,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ keymap=keymap,
+ not_rest_data_keys=["state"],
+ )
+ # This is where the splunk_* args are processed
+ request_data = splunk_request.get_data()
+
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/search/data/inputs/monitor/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+
+ if module.params["state"] == "present":
+ if query_dict:
+ needs_change = False
+ for arg in request_data:
+ if arg in query_dict["entry"][0]["content"]:
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_data[arg]
+ ):
+ needs_change = True
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/monitor/{0}".format(
+ quote_plus(module.params["name"])
+ ),
+ data=urlencode(request_data),
+ )
+ module.exit_json(
+ changed=True,
+ msg="{0} updated.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+ else:
+ # Create it
+ _data = splunk_request.get_data()
+ _data["name"] = module.params["name"]
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/monitor", data=urlencode(_data)
+ )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
+
+ if module.params["state"] == "absent":
+ if query_dict:
+ splunk_data = splunk_request.delete_by_path(
+ "servicesNS/nobody/search/data/inputs/monitor/{0}".format(
+ quote_plus(module.params["name"])
+ )
+ )
+ module.exit_json(
+ changed=True,
+ msg="Deleted {0}.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py
new file mode 100644
index 00000000..fba8feba
--- /dev/null
+++ b/collections-debian-merged/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py
@@ -0,0 +1,272 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# https://github.com/ansible/ansible/issues/65816
+# https://github.com/PyCQA/pylint/issues/214
+
+# (c) 2018, Adam Miller (admiller@redhat.com)
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+---
+module: data_input_network
+short_description: Manage Splunk Data Inputs of type TCP or UDP
+description:
+ - This module allows for addition or deletion of TCP and UDP Data Inputs in Splunk.
+version_added: "1.0.0"
+options:
+ protocol:
+ description:
+ - Choose between C(tcp) and C(udp)
+ required: True
+ choices:
+ - 'tcp'
+ - 'udp'
+ type: str
+ connection_host:
+ description:
+ - Set the host for the remote server that is sending data.
+ - C(ip) sets the host to the IP address of the remote server sending data.
+ - C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data.
+ - C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
+ default: "ip"
+ required: False
+ type: str
+ choices:
+ - "ip"
+ - "dns"
+ - "none"
+ state:
+ description:
+ - Enable, disable, create, or destroy
+ choices:
+ - "present"
+ - "absent"
+ - "enabled"
+ - "disable"
+ required: False
+ default: "present"
+ type: str
+ datatype:
+ description: >
+ Forwarders can transmit three types of data: raw, unparsed, or parsed.
+ C(cooked) data refers to parsed and unparsed formats.
+ choices:
+ - "cooked"
+ - "raw"
+ default: "raw"
+ required: False
+ type: str
+ host:
+ description:
+ - Host from which the indexer gets data.
+ required: False
+ type: str
+ index:
+ description:
+ - Default index to store generated events.
+ type: str
+ name:
+ description:
+ - The input port which receives raw data.
+ required: True
+ type: str
+ queue:
+ description:
+ - Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
+ - Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more
+ information about props.conf and rules for timestamping and linebreaking, refer to props.conf and
+ the online documentation at "Monitor files and directories with inputs.conf"
+ - Set queue to indexQueue to send your data directly into the index.
+ choices:
+ - "parsingQueue"
+ - "indexQueue"
+ type: str
+ required: False
+ default: "parsingQueue"
+ rawTcpDoneTimeout:
+ description:
+ - Specifies in seconds the timeout value for adding a Done-key.
+ - If a connection over the port specified by name remains idle for the specified
+ number of seconds after receiving data, a Done-key is added, indicating that the last event has been completely received.
+ default: 10
+ type: int
+ required: False
+ restrictToHost:
+ description:
+ - Allows for restricting this input to only accept data from the host specified here.
+ required: False
+ type: str
+ ssl:
+ description:
+ - Enable or disable SSL for the data stream
+ required: False
+ type: bool
+ source:
+ description:
+ - Sets the source key/field for events from this input. Defaults to the input file path.
+ - >
+ Sets the source key initial value. The key is used during parsing/indexing, in particular to set
+ the source field during indexing. It is also the source field used at search time. As a convenience,
+ the chosen string is prepended with 'source::'.
+ - >
+ Note: Overriding the source key is generally not recommended. Typically, the input layer provides a
+ more accurate string to aid in problem analysis and investigation, accurately recording the file from
+ which the data was retrieved. Consider use of source types, tagging, and search wildcards before
+ overriding this value.
+ type: str
+ sourcetype:
+ description:
+ - Set the source type for events from this input.
+ - '"sourcetype=" is automatically prepended to <string>.'
+ - Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).
+ type: str
+
+author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
+"""
+
+EXAMPLES = """
+- name: Example adding data input network with splunk.es.data_input_network
+ splunk.es.data_input_network:
+ name: "8099"
+ protocol: "tcp"
+ state: "present"
+"""
+
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils._text import to_text
+
+from ansible.module_utils.urls import Request
+from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.splunk.es.plugins.module_utils.splunk import (
+ SplunkRequest,
+ parse_splunk_args,
+)
+
+import copy
+
+
+def main():
+
+ argspec = dict(
+ state=dict(
+ required=False,
+ choices=["present", "absent", "enabled", "disable"],
+ default="present",
+ type="str",
+ ),
+ connection_host=dict(
+ required=False, choices=["ip", "dns", "none"], default="ip", type="str"
+ ),
+ host=dict(required=False, type="str", default=None),
+ index=dict(required=False, type="str", default=None),
+ name=dict(required=True, type="str"),
+ protocol=dict(required=True, type="str", choices=["tcp", "udp"]),
+ queue=dict(
+ required=False,
+ type="str",
+ choices=["parsingQueue", "indexQueue"],
+ default="parsingQueue",
+ ),
+ rawTcpDoneTimeout=dict(required=False, type="int", default=10),
+ restrictToHost=dict(required=False, type="str", default=None),
+ ssl=dict(required=False, type="bool", default=None),
+ source=dict(required=False, type="str", default=None),
+ sourcetype=dict(required=False, type="str", default=None),
+ datatype=dict(required=False, choices=["cooked", "raw"], default="raw"),
+ )
+
+ module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
+
+ splunk_request = SplunkRequest(
+ module,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ not_rest_data_keys=["state", "datatype", "protocol"],
+ )
+ # This is where the splunk_* args are processed
+ request_data = splunk_request.get_data()
+
+ query_dict = splunk_request.get_by_path(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ quote_plus(module.params["name"]),
+ )
+ )
+
+ if module.params["state"] in ["present", "enabled", "disabled"]:
+ _data = splunk_request.get_data()
+ if module.params["state"] in ["present", "enabled"]:
+ _data["disabled"] = False
+ else:
+ _data["disabled"] = True
+ if query_dict:
+ needs_change = False
+ for arg in request_data:
+ if arg in query_dict["entry"][0]["content"]:
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_data[arg]
+ ):
+ needs_change = True
+ if not needs_change:
+ module.exit_json(
+ changed=False, msg="Nothing to do.", splunk_data=query_dict
+ )
+ if module.check_mode and needs_change:
+ module.exit_json(
+ changed=True,
+ msg="A change would have been made if not in check mode.",
+ splunk_data=query_dict,
+ )
+ if needs_change:
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ quote_plus(module.params["name"]),
+ ),
+ data=urlencode(_data),
+ )
+ if module.params["state"] in ["present", "enabled"]:
+ module.exit_json(
+ changed=True,
+ msg="{0} updated.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+ else:
+ module.exit_json(
+ changed=True,
+ msg="{0} disabled.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+ else:
+ # Create it
+ splunk_data = splunk_request.create_update(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ ),
+ data=urlencode(_data),
+ )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
+ elif module.params["state"] == "absent":
+ if query_dict:
+ splunk_data = splunk_request.delete_by_path(
+ "servicesNS/nobody/search/data/inputs/{0}/{1}/{2}".format(
+ quote_plus(module.params["protocol"]),
+ quote_plus(module.params["datatype"]),
+ quote_plus(module.params["name"]),
+ )
+ )
+ module.exit_json(
+ changed=True,
+ msg="Deleted {0}.".format(module.params["name"]),
+ splunk_data=splunk_data,
+ )
+
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data={})
+
+
+if __name__ == "__main__":
+ main()