path: root/ansible_collections/splunk/es/plugins
author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-18 05:52:22 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-18 05:52:22 +0000
commit     38b7c80217c4e72b1d8988eb1e60bb6e77334114 (patch)
tree       356e9fd3762877d07cde52d21e77070aeff7e789 /ansible_collections/splunk/es/plugins
parent     Adding upstream version 7.7.0+dfsg. (diff)
download   ansible-38b7c80217c4e72b1d8988eb1e60bb6e77334114.tar.xz
           ansible-38b7c80217c4e72b1d8988eb1e60bb6e77334114.zip
Adding upstream version 9.4.0+dfsg. (upstream/9.4.0+dfsg)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'ansible_collections/splunk/es/plugins')
-rw-r--r--  ansible_collections/splunk/es/plugins/action/splunk_adaptive_response_notable_events.py  |  96
-rw-r--r--  ansible_collections/splunk/es/plugins/action/splunk_correlation_searches.py              |  83
-rw-r--r--  ansible_collections/splunk/es/plugins/action/splunk_data_inputs_monitor.py               |  68
-rw-r--r--  ansible_collections/splunk/es/plugins/action/splunk_data_inputs_network.py               |  74
-rw-r--r--  ansible_collections/splunk/es/plugins/httpapi/splunk.py                                  |  16
-rw-r--r--  ansible_collections/splunk/es/plugins/module_utils/splunk.py                             |  41
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py         | 153
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/correlation_search.py                      | 117
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/correlation_search_info.py                 |  15
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/data_input_monitor.py                      | 101
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/data_input_network.py                      |  61
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py  | 153
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py |  41
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py               | 117
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py          |  15
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py             |  12
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py               | 101
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py               |  61
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py              |  16
-rw-r--r--  ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py              |  18
20 files changed, 630 insertions, 729 deletions
diff --git a/ansible_collections/splunk/es/plugins/action/splunk_adaptive_response_notable_events.py b/ansible_collections/splunk/es/plugins/action/splunk_adaptive_response_notable_events.py
index a95e4b3ed..ee6364708 100644
--- a/ansible_collections/splunk/es/plugins/action/splunk_adaptive_response_notable_events.py
+++ b/ansible_collections/splunk/es/plugins/action/splunk_adaptive_response_notable_events.py
@@ -23,18 +23,20 @@ The module file for adaptive_response_notable_events
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import json
-from ansible.plugins.action import ActionBase
from ansible.errors import AnsibleActionFail
-from ansible.module_utils.six.moves.urllib.parse import quote
from ansible.module_utils.connection import Connection
-
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
+from ansible.module_utils.six.moves.urllib.parse import quote
+from ansible.plugins.action import ActionBase
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
)
+
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
@@ -42,9 +44,6 @@ from ansible_collections.splunk.es.plugins.module_utils.splunk import (
remove_get_keys_from_payload_dict,
set_defaults,
)
-from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
- AnsibleArgSpecValidator,
-)
from ansible_collections.splunk.es.plugins.modules.splunk_adaptive_response_notable_events import (
DOCUMENTATION,
)
@@ -56,9 +55,7 @@ class ActionModule(ActionBase):
def __init__(self, *args, **kwargs):
super(ActionModule, self).__init__(*args, **kwargs)
self._result = None
- self.api_object = (
- "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
- )
+ self.api_object = "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
self.module_name = "adaptive_response_notable_events"
self.key_transform = {
"action.notable.param.default_owner": "default_owner",
@@ -80,7 +77,7 @@ class ActionModule(ActionBase):
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
- data=self._task.args,
+ data=utils.remove_empties(self._task.args),
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
@@ -103,17 +100,13 @@ class ActionModule(ActionBase):
def save_params(self, want_conf):
param_store = {}
if "recommended_actions" in want_conf:
- param_store["recommended_actions"] = want_conf[
- "recommended_actions"
- ]
+ param_store["recommended_actions"] = want_conf["recommended_actions"]
if "extract_artifacts" in want_conf:
param_store["extract_artifacts"] = want_conf["extract_artifacts"]
if "next_steps" in want_conf:
param_store["next_steps"] = want_conf["next_steps"]
if "investigation_profiles" in want_conf:
- param_store["investigation_profiles"] = want_conf[
- "investigation_profiles"
- ]
+ param_store["investigation_profiles"] = want_conf["investigation_profiles"]
return param_store
@@ -125,8 +118,7 @@ class ActionModule(ActionBase):
if metadata["actions"] == "notable":
pass
elif (
- len(metadata["actions"].split(",")) > 0
- and "notable" not in metadata["actions"]
+ len(metadata["actions"].split(",")) > 0 and "notable" not in metadata["actions"]
):
metadata["actions"] = metadata["actions"] + ", notable"
else:
@@ -136,10 +128,7 @@ class ActionModule(ActionBase):
if "actions" in metadata:
if metadata["actions"] == "notable":
metadata["actions"] = ""
- elif (
- len(metadata["actions"].split(",")) > 0
- and "notable" in metadata["actions"]
- ):
+ elif len(metadata["actions"].split(",")) > 0 and "notable" in metadata["actions"]:
tmp_list = metadata["actions"].split(",")
tmp_list.remove(" notable")
metadata["actions"] = ",".join(tmp_list)
@@ -161,7 +150,7 @@ class ActionModule(ActionBase):
res.pop("investigation_profiles")
else:
res["investigation_profiles"] = json.loads(
- res["investigation_profiles"]
+ res["investigation_profiles"],
)
investigation_profiles = []
for keys in res["investigation_profiles"].keys():
@@ -209,12 +198,12 @@ class ActionModule(ActionBase):
if "action.notable.param.extract_artifacts" in res:
res["action.notable.param.extract_artifacts"] = json.dumps(
- res["action.notable.param.extract_artifacts"]
+ res["action.notable.param.extract_artifacts"],
)
if "action.notable.param.recommended_actions" in res:
res["action.notable.param.recommended_actions"] = ",".join(
- res["action.notable.param.recommended_actions"]
+ res["action.notable.param.recommended_actions"],
)
if "action.notable.param.investigation_profiles" in res:
@@ -222,7 +211,7 @@ class ActionModule(ActionBase):
for element in res["action.notable.param.investigation_profiles"]:
investigation_profiles["profile://" + element] = {}
res["action.notable.param.investigation_profiles"] = json.dumps(
- investigation_profiles
+ investigation_profiles,
)
if "action.notable.param.next_steps" in res:
@@ -233,7 +222,7 @@ class ActionModule(ActionBase):
# NOTE: version:1 appears to be hard coded when you create this via the splunk web UI
next_steps_dict = {"version": 1, "data": next_steps}
res["action.notable.param.next_steps"] = json.dumps(
- next_steps_dict
+ next_steps_dict,
)
if "action.notable.param.default_status" in res:
@@ -259,20 +248,20 @@ class ActionModule(ActionBase):
"{0}/{1}".format(
self.api_object,
quote(correlation_search_name),
- )
+ ),
)
search_result = {}
if query_dict:
search_result, metadata = self.map_params_to_object(
- query_dict["entry"][0]
+ query_dict["entry"][0],
)
else:
raise AnsibleActionFail(
"Correlation Search '{0}' doesn't exist".format(
- correlation_search_name
- )
+ correlation_search_name,
+ ),
)
return search_result, metadata
@@ -285,15 +274,14 @@ class ActionModule(ActionBase):
changed = False
for want_conf in config:
search_by_name, metadata = self.search_for_resource_name(
- conn_request, want_conf["correlation_search_name"]
+ conn_request,
+ want_conf["correlation_search_name"],
)
search_by_name = utils.remove_empties(search_by_name)
# Compare obtained values with a dict representing values in a 'deleted' state
diff_cmp = {
- "correlation_search_name": want_conf[
- "correlation_search_name"
- ],
+ "correlation_search_name": want_conf["correlation_search_name"],
"drilldown_earliest_offset": "$info_min_time$",
"drilldown_latest_offset": "$info_max_time$",
}
@@ -367,7 +355,8 @@ class ActionModule(ActionBase):
remove_from_diff_compare = []
for want_conf in config:
have_conf, metadata = self.search_for_resource_name(
- conn_request, want_conf["correlation_search_name"]
+ conn_request,
+ want_conf["correlation_search_name"],
)
correlation_search_name = want_conf["correlation_search_name"]
@@ -385,17 +374,17 @@ class ActionModule(ActionBase):
if diff:
before.append(have_conf)
if self._task.args["state"] == "merged":
-
# need to store 'recommended_actions','extract_artifacts'
# 'next_steps' and 'investigation_profiles'
# since merging in the parsed form will eliminate any differences
param_store = self.save_params(want_conf)
want_conf = utils.remove_empties(
- utils.dict_merge(have_conf, want_conf)
+ utils.dict_merge(have_conf, want_conf),
)
want_conf = remove_get_keys_from_payload_dict(
- want_conf, remove_from_diff_compare
+ want_conf,
+ remove_from_diff_compare,
)
# restoring parameters
@@ -404,7 +393,8 @@ class ActionModule(ActionBase):
changed = True
payload = self.map_objects_to_params(
- metadata, want_conf
+ metadata,
+ want_conf,
)
url = "{0}/{1}".format(
@@ -416,18 +406,20 @@ class ActionModule(ActionBase):
data=payload,
)
response_json, metadata = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.append(response_json)
elif self._task.args["state"] == "replaced":
self.delete_module_api_config(
- conn_request=conn_request, config=[want_conf]
+ conn_request=conn_request,
+ config=[want_conf],
)
changed = True
payload = self.map_objects_to_params(
- metadata, want_conf
+ metadata,
+ want_conf,
)
url = "{0}/{1}".format(
@@ -439,7 +431,7 @@ class ActionModule(ActionBase):
data=payload,
)
response_json, metadata = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.append(response_json)
@@ -461,7 +453,7 @@ class ActionModule(ActionBase):
)
response_json, metadata = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.extend(before)
@@ -503,14 +495,12 @@ class ActionModule(ActionBase):
for item in config:
self._result[self.module_name]["gathered"].append(
self.search_for_resource_name(
- conn_request, item["correlation_search_name"]
- )[0]
+ conn_request,
+ item["correlation_search_name"],
+ )[0],
)
- elif (
- self._task.args["state"] == "merged"
- or self._task.args["state"] == "replaced"
- ):
+ elif self._task.args["state"] == "merged" or self._task.args["state"] == "replaced":
(
self._result[self.module_name],
self._result["changed"],
diff --git a/ansible_collections/splunk/es/plugins/action/splunk_correlation_searches.py b/ansible_collections/splunk/es/plugins/action/splunk_correlation_searches.py
index 5f0daea16..b5ba500ae 100644
--- a/ansible_collections/splunk/es/plugins/action/splunk_correlation_searches.py
+++ b/ansible_collections/splunk/es/plugins/action/splunk_correlation_searches.py
@@ -23,17 +23,20 @@ The module file for splunk_correlation_searches
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import json
-from ansible.plugins.action import ActionBase
+
from ansible.errors import AnsibleActionFail
-from ansible.module_utils.six.moves.urllib.parse import quote
from ansible.module_utils.connection import Connection
-
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
+from ansible.module_utils.six.moves.urllib.parse import quote
+from ansible.plugins.action import ActionBase
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
)
+
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
@@ -41,12 +44,7 @@ from ansible_collections.splunk.es.plugins.module_utils.splunk import (
remove_get_keys_from_payload_dict,
set_defaults,
)
-from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
- AnsibleArgSpecValidator,
-)
-from ansible_collections.splunk.es.plugins.modules.splunk_correlation_searches import (
- DOCUMENTATION,
-)
+from ansible_collections.splunk.es.plugins.modules.splunk_correlation_searches import DOCUMENTATION
class ActionModule(ActionBase):
@@ -55,9 +53,7 @@ class ActionModule(ActionBase):
def __init__(self, *args, **kwargs):
super(ActionModule, self).__init__(*args, **kwargs)
self._result = None
- self.api_object = (
- "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
- )
+ self.api_object = "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
self.module_name = "correlation_searches"
self.key_transform = {
"disabled": "disabled",
@@ -83,7 +79,7 @@ class ActionModule(ActionBase):
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
- data=self._task.args,
+ data=utils.remove_empties(self._task.args),
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
@@ -111,9 +107,7 @@ class ActionModule(ActionBase):
if "annotations" in want_conf:
param_store["annotations"] = want_conf["annotations"]
if "throttle_fields_to_group_by" in want_conf:
- param_store["throttle_fields_to_group_by"] = want_conf[
- "throttle_fields_to_group_by"
- ]
+ param_store["throttle_fields_to_group_by"] = want_conf["throttle_fields_to_group_by"]
return param_store
@@ -137,9 +131,7 @@ class ActionModule(ActionBase):
res["trigger_alert"] = "for each result"
if "throttle_fields_to_group_by" in res:
- res["throttle_fields_to_group_by"] = res[
- "throttle_fields_to_group_by"
- ].split(",")
+ res["throttle_fields_to_group_by"] = res["throttle_fields_to_group_by"].split(",")
if "annotations" in res:
res["annotations"] = json.loads(res["annotations"])
@@ -149,7 +141,6 @@ class ActionModule(ActionBase):
# need to check for custom annotation frameworks
for k, v in res["annotations"].items():
if k in {"cis20", "nist", "mitre_attack", "kill_chain_phases"}:
-
continue
entry = {}
entry["framework"] = k
@@ -188,7 +179,7 @@ class ActionModule(ActionBase):
if "alert.suppress.fields" in res:
res["alert.suppress.fields"] = ",".join(
- res["alert.suppress.fields"]
+ res["alert.suppress.fields"],
)
if (
@@ -196,12 +187,12 @@ class ActionModule(ActionBase):
and "custom" in res["action.correlationsearch.annotations"]
):
for ele in res["action.correlationsearch.annotations"]["custom"]:
- res["action.correlationsearch.annotations"][
- ele["framework"]
- ] = ele["custom_annotations"]
+ res["action.correlationsearch.annotations"][ele["framework"]] = ele[
+ "custom_annotations"
+ ]
res["action.correlationsearch.annotations"].pop("custom")
res["action.correlationsearch.annotations"] = json.dumps(
- res["action.correlationsearch.annotations"]
+ res["action.correlationsearch.annotations"],
)
return res
@@ -211,7 +202,7 @@ class ActionModule(ActionBase):
"{0}/{1}".format(
self.api_object,
quote(correlation_search_name),
- )
+ ),
)
search_result = {}
@@ -227,7 +218,8 @@ class ActionModule(ActionBase):
changed = False
for want_conf in config:
search_by_name = self.search_for_resource_name(
- conn_request, want_conf["name"]
+ conn_request,
+ want_conf["name"],
)
if search_by_name:
@@ -259,7 +251,8 @@ class ActionModule(ActionBase):
remove_from_diff_compare = []
for want_conf in config:
have_conf = self.search_for_resource_name(
- conn_request, want_conf["name"]
+ conn_request,
+ want_conf["name"],
)
if have_conf:
@@ -282,10 +275,11 @@ class ActionModule(ActionBase):
param_store = self.save_params(want_conf)
want_conf = utils.remove_empties(
- utils.dict_merge(have_conf, want_conf)
+ utils.dict_merge(have_conf, want_conf),
)
want_conf = remove_get_keys_from_payload_dict(
- want_conf, remove_from_diff_compare
+ want_conf,
+ remove_from_diff_compare,
)
# restoring parameters
@@ -304,13 +298,14 @@ class ActionModule(ActionBase):
data=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.append(response_json)
elif self._task.args["state"] == "replaced":
self.delete_module_api_config(
- conn_request=conn_request, config=[want_conf]
+ conn_request=conn_request,
+ config=[want_conf],
)
changed = True
@@ -333,7 +328,7 @@ class ActionModule(ActionBase):
data=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.append(response_json)
@@ -354,7 +349,8 @@ class ActionModule(ActionBase):
# while creating new correlation search, this is how to set the 'app' field
if "app" in want_conf:
url = url.replace(
- "SplunkEnterpriseSecuritySuite", want_conf["app"]
+ "SplunkEnterpriseSecuritySuite",
+ want_conf["app"],
)
api_response = conn_request.create_update(
@@ -362,7 +358,7 @@ class ActionModule(ActionBase):
data=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.extend(before)
@@ -403,20 +399,19 @@ class ActionModule(ActionBase):
self._result["gathered"] = []
for item in config:
result = self.search_for_resource_name(
- conn_request, item["name"]
+ conn_request,
+ item["name"],
)
if result:
self._result["gathered"].append(result)
for item in config:
self._result["gathered"].append(
self.search_for_resource_name(
- conn_request, item["name"]
- )
+ conn_request,
+ item["name"],
+ ),
)
- elif (
- self._task.args["state"] == "merged"
- or self._task.args["state"] == "replaced"
- ):
+ elif self._task.args["state"] == "merged" or self._task.args["state"] == "replaced":
(
self._result[self.module_name],
self._result["changed"],
diff --git a/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_monitor.py b/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_monitor.py
index 7c9c03a55..e2e68841f 100644
--- a/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_monitor.py
+++ b/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_monitor.py
@@ -23,15 +23,17 @@ The module file for data_inputs_monitor
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-from ansible.plugins.action import ActionBase
-from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible.module_utils.connection import Connection
-
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
+from ansible.module_utils.six.moves.urllib.parse import quote_plus
+from ansible.plugins.action import ActionBase
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
)
+
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
@@ -39,12 +41,7 @@ from ansible_collections.splunk.es.plugins.module_utils.splunk import (
remove_get_keys_from_payload_dict,
set_defaults,
)
-from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
- AnsibleArgSpecValidator,
-)
-from ansible_collections.splunk.es.plugins.modules.splunk_data_inputs_monitor import (
- DOCUMENTATION,
-)
+from ansible_collections.splunk.es.plugins.modules.splunk_data_inputs_monitor import DOCUMENTATION
class ActionModule(ActionBase):
@@ -77,7 +74,7 @@ class ActionModule(ActionBase):
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
- data=self._task.args,
+ data=utils.remove_empties(self._task.args),
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
@@ -102,7 +99,7 @@ class ActionModule(ActionBase):
def search_for_resource_name(self, conn_request, directory_name):
query_dict = conn_request.get_by_path(
- "{0}/{1}".format(self.api_object, quote_plus(directory_name))
+ "{0}/{1}".format(self.api_object, quote_plus(directory_name)),
)
search_result = {}
@@ -118,14 +115,16 @@ class ActionModule(ActionBase):
changed = False
for want_conf in config:
search_by_name = self.search_for_resource_name(
- conn_request, want_conf["name"]
+ conn_request,
+ want_conf["name"],
)
if search_by_name:
before.append(search_by_name)
conn_request.delete_by_path(
"{0}/{1}".format(
- self.api_object, quote_plus(want_conf["name"])
- )
+ self.api_object,
+ quote_plus(want_conf["name"]),
+ ),
)
changed = True
after = []
@@ -157,7 +156,8 @@ class ActionModule(ActionBase):
]
for want_conf in config:
have_conf = self.search_for_resource_name(
- conn_request, want_conf["name"]
+ conn_request,
+ want_conf["name"],
)
if have_conf:
@@ -173,22 +173,24 @@ class ActionModule(ActionBase):
if diff:
diff = remove_get_keys_from_payload_dict(
- diff, remove_from_diff_compare
+ diff,
+ remove_from_diff_compare,
)
if diff:
before.append(have_conf)
if self._task.args["state"] == "merged":
-
want_conf = utils.remove_empties(
- utils.dict_merge(have_conf, want_conf)
+ utils.dict_merge(have_conf, want_conf),
)
want_conf = remove_get_keys_from_payload_dict(
- want_conf, remove_from_diff_compare
+ want_conf,
+ remove_from_diff_compare,
)
changed = True
payload = map_obj_to_params(
- want_conf, self.key_transform
+ want_conf,
+ self.key_transform,
)
url = "{0}/{1}".format(
self.api_object,
@@ -199,7 +201,7 @@ class ActionModule(ActionBase):
data=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.append(response_json)
@@ -208,12 +210,13 @@ class ActionModule(ActionBase):
"{0}/{1}".format(
self.api_object,
quote_plus(want_conf["name"]),
- )
+ ),
)
changed = True
payload = map_obj_to_params(
- want_conf, self.key_transform
+ want_conf,
+ self.key_transform,
)
url = "{0}".format(self.api_object)
api_response = conn_request.create_update(
@@ -221,7 +224,7 @@ class ActionModule(ActionBase):
data=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.append(response_json)
@@ -242,7 +245,7 @@ class ActionModule(ActionBase):
data=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0]
+ api_response["entry"][0],
)
after.extend(before)
@@ -257,7 +260,6 @@ class ActionModule(ActionBase):
return res_config, changed
def run(self, tmp=None, task_vars=None):
-
self._supports_check_mode = True
self._result = super(ActionModule, self).run(tmp, task_vars)
@@ -283,18 +285,16 @@ class ActionModule(ActionBase):
self._result["changed"] = False
for item in config:
result = self.search_for_resource_name(
- conn_request, item["name"]
+ conn_request,
+ item["name"],
)
if result:
self._result["gathered"].append(result)
else:
self._result["gathered"] = conn_request.get_by_path(
- self.api_object
+ self.api_object,
)["entry"]
- elif (
- self._task.args["state"] == "merged"
- or self._task.args["state"] == "replaced"
- ):
+ elif self._task.args["state"] == "merged" or self._task.args["state"] == "replaced":
(
self._result[self.module_name],
self._result["changed"],
diff --git a/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_network.py b/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_network.py
index bd72d12b5..2558a05f4 100644
--- a/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_network.py
+++ b/ansible_collections/splunk/es/plugins/action/splunk_data_inputs_network.py
@@ -23,28 +23,25 @@ The module file for data_inputs_network
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-from ansible.plugins.action import ActionBase
from ansible.errors import AnsibleActionFail
-from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible.module_utils.connection import Connection
-
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
+from ansible.module_utils.six.moves.urllib.parse import quote_plus
+from ansible.plugins.action import ActionBase
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
)
+
from ansible_collections.splunk.es.plugins.module_utils.splunk import (
SplunkRequest,
map_obj_to_params,
map_params_to_obj,
remove_get_keys_from_payload_dict,
)
-from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
- AnsibleArgSpecValidator,
-)
-from ansible_collections.splunk.es.plugins.modules.splunk_data_inputs_network import (
- DOCUMENTATION,
-)
+from ansible_collections.splunk.es.plugins.modules.splunk_data_inputs_network import DOCUMENTATION
class ActionModule(ActionBase):
@@ -79,7 +76,7 @@ class ActionModule(ActionBase):
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
- data=self._task.args,
+ data=utils.remove_empties(self._task.args),
schema=DOCUMENTATION,
schema_format="doc",
name=self._task.action,
@@ -172,7 +169,7 @@ class ActionModule(ActionBase):
url = url[:-1]
else:
raise AnsibleActionFail(
- "Incompatible protocol specified. Please specify 'tcp' or 'udp'"
+ "Incompatible protocol specified. Please specify 'tcp' or 'udp'",
)
if req_type == "get":
@@ -199,7 +196,8 @@ class ActionModule(ActionBase):
if query_dict:
search_result = self.map_params_to_object(
- query_dict["entry"][0], datatype
+ query_dict["entry"][0],
+ datatype,
)
# Adding back protocol and datatype fields for better clarity
@@ -240,7 +238,8 @@ class ActionModule(ActionBase):
and want_conf["restrict_to_host"] not in want_conf["name"]
):
want_conf["name"] = "{0}:{1}".format(
- want_conf["restrict_to_host"], want_conf["name"]
+ want_conf["restrict_to_host"],
+ want_conf["name"],
)
# If datatype is "splunktcptoken", the value "splunktcptoken://" is appended
@@ -251,7 +250,8 @@ class ActionModule(ActionBase):
and "splunktcptoken://" not in want_conf["name"]
):
want_conf["name"] = "{0}{1}".format(
- "splunktcptoken://", want_conf["name"]
+ "splunktcptoken://",
+ want_conf["name"],
)
name = want_conf["name"]
@@ -296,7 +296,8 @@ class ActionModule(ActionBase):
raise AnsibleActionFail("No name specified")
have_conf, protocol, datatype, name, _old_name = self.parse_config(
- conn_request, want_conf
+ conn_request,
+ want_conf,
)
if protocol == "tcp" and datatype == "ssl":
@@ -336,14 +337,11 @@ class ActionModule(ActionBase):
]
have_conf, protocol, datatype, name, old_name = self.parse_config(
- conn_request, want_conf
+ conn_request,
+ want_conf,
)
- if (
- protocol == "tcp"
- and datatype == "ssl"
- and self._task.args["state"] == "replaced"
- ):
+ if protocol == "tcp" and datatype == "ssl" and self._task.args["state"] == "replaced":
raise AnsibleActionFail("Replaced state not supported for SSL")
if have_conf:
@@ -358,22 +356,24 @@ class ActionModule(ActionBase):
if diff:
diff = remove_get_keys_from_payload_dict(
- diff, remove_from_diff_compare
+ diff,
+ remove_from_diff_compare,
)
if diff:
before.append(have_conf)
if self._task.args["state"] == "merged":
-
want_conf = utils.remove_empties(
- utils.dict_merge(have_conf, want_conf)
+ utils.dict_merge(have_conf, want_conf),
)
want_conf = remove_get_keys_from_payload_dict(
- want_conf, remove_from_diff_compare
+ want_conf,
+ remove_from_diff_compare,
)
changed = True
payload = map_obj_to_params(
- want_conf, self.key_transform
+ want_conf,
+ self.key_transform,
)
api_response = self.request_by_path(
conn_request,
@@ -384,7 +384,8 @@ class ActionModule(ActionBase):
payload=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0], datatype
+ api_response["entry"][0],
+ datatype,
)
# Adding back protocol and datatype fields for better clarity
@@ -404,7 +405,8 @@ class ActionModule(ActionBase):
changed = True
payload = map_obj_to_params(
- want_conf, self.key_transform
+ want_conf,
+ self.key_transform,
)
# while creating new conf, we need to only use numerical values
# splunk will later append param value to it.
@@ -419,7 +421,8 @@ class ActionModule(ActionBase):
payload=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0], datatype
+ api_response["entry"][0],
+ datatype,
)
# Adding back protocol and datatype fields for better clarity
@@ -449,7 +452,8 @@ class ActionModule(ActionBase):
payload=payload,
)
response_json = self.map_params_to_object(
- api_response["entry"][0], datatype
+ api_response["entry"][0],
+ datatype,
)
# Adding back protocol and datatype fields for better clarity
@@ -490,7 +494,6 @@ class ActionModule(ActionBase):
self._result["changed"] = False
for item in config:
if item.get("name"):
-
result = self.search_for_resource_name(
conn_request,
item["protocol"],
@@ -514,10 +517,7 @@ class ActionModule(ActionBase):
else:
raise AnsibleActionFail("No protocol specified")
- elif (
- self._task.args["state"] == "merged"
- or self._task.args["state"] == "replaced"
- ):
+ elif self._task.args["state"] == "merged" or self._task.args["state"] == "replaced":
if config:
(
self._result[self.module_return],
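Illustrative aside, not part of the diff: the data_inputs_network hunks above normalize the input name by prefixing it with restrict_to_host when that host is not already part of the name, and by prepending "splunktcptoken://" for splunktcptoken inputs. A simplified standalone sketch of that normalization, with the surrounding protocol checks omitted and hypothetical values in the examples:

def normalize_input_name(want_conf, datatype):
    # Prefix the name with its restrict_to_host value unless the host is
    # already part of the name, as the hunk above does.
    name = want_conf["name"]
    host = want_conf.get("restrict_to_host")
    if host and host not in name:
        name = "{0}:{1}".format(host, name)
    # splunktcptoken inputs carry a "splunktcptoken://" prefix, so it is added
    # up front when missing.
    if datatype == "splunktcptoken" and "splunktcptoken://" not in name:
        name = "{0}{1}".format("splunktcptoken://", name)
    return name

print(normalize_input_name({"name": "8100", "restrict_to_host": "default"}, "tcp"))
print(normalize_input_name({"name": "mytoken"}, "splunktcptoken"))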
diff --git a/ansible_collections/splunk/es/plugins/httpapi/splunk.py b/ansible_collections/splunk/es/plugins/httpapi/splunk.py
index 91f079e06..095a7b71c 100644
--- a/ansible_collections/splunk/es/plugins/httpapi/splunk.py
+++ b/ansible_collections/splunk/es/plugins/httpapi/splunk.py
@@ -3,6 +3,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -18,13 +19,12 @@ version_added: "1.0.0"
import json
-from ansible.module_utils.basic import to_text
from ansible.errors import AnsibleConnectionFailure
-from ansible.module_utils.six.moves.urllib.error import HTTPError
-from ansible_collections.ansible.netcommon.plugins.plugin_utils.httpapi_base import (
- HttpApiBase,
-)
+from ansible.module_utils.basic import to_text
from ansible.module_utils.connection import ConnectionError
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.ansible.netcommon.plugins.plugin_utils.httpapi_base import HttpApiBase
+
BASE_HEADERS = {"Content-Type": "application/json"}
@@ -47,7 +47,8 @@ class HttpApi(HttpApiBase):
return response.getcode(), self._response_to_json(value)
except AnsibleConnectionFailure as e:
self.connection.queue_message(
- "vvv", "AnsibleConnectionFailure: %s" % e
+ "vvv",
+ "AnsibleConnectionFailure: %s" % e,
)
if to_text("Could not connect to") in to_text(e):
raise
@@ -62,8 +63,7 @@ class HttpApi(HttpApiBase):
def _display_request(self, request_method, path):
self.connection.queue_message(
"vvvv",
- "Web Services: %s %s/%s"
- % (request_method, self.connection._url, path),
+ "Web Services: %s %s/%s" % (request_method, self.connection._url, path),
)
def _get_response_value(self, response_data):
diff --git a/ansible_collections/splunk/es/plugins/module_utils/splunk.py b/ansible_collections/splunk/es/plugins/module_utils/splunk.py
index 240481d3a..eb5ed2755 100644
--- a/ansible_collections/splunk/es/plugins/module_utils/splunk.py
+++ b/ansible_collections/splunk/es/plugins/module_utils/splunk.py
@@ -5,16 +5,17 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
+try:
+ from ssl import CertificateError
+except ImportError:
+ from backports.ssl_match_hostname import CertificateError
-from ansible.module_utils.urls import CertificateError
-from ansible.module_utils.six.moves.urllib.parse import urlencode
-from ansible.module_utils.connection import (
- ConnectionError,
- Connection,
-)
from ansible.module_utils._text import to_text
+from ansible.module_utils.connection import Connection, ConnectionError
from ansible.module_utils.six import iteritems
+from ansible.module_utils.six.moves.urllib.parse import urlencode
def parse_splunk_args(module):
@@ -39,8 +40,8 @@ def parse_splunk_args(module):
except TypeError as e:
module.fail_json(
msg="Invalid data type provided for splunk module_util.parse_splunk_args: {0}".format(
- e
- )
+ e,
+ ),
)
@@ -62,9 +63,7 @@ def map_params_to_obj(module_params, key_transform):
obj = {}
for k, v in iteritems(key_transform):
if k in module_params and (
- module_params.get(k)
- or module_params.get(k) == 0
- or module_params.get(k) is False
+ module_params.get(k) or module_params.get(k) == 0 or module_params.get(k) is False
):
obj[v] = module_params.pop(k)
return obj
@@ -152,19 +151,22 @@ class SplunkRequest(object):
def _httpapi_error_handle(self, method, uri, payload=None):
try:
code, response = self.connection.send_request(
- method, uri, payload=payload
+ method,
+ uri,
+ payload=payload,
)
if code == 404:
if to_text("Object not found") in to_text(response) or to_text(
- "Could not find object"
+ "Could not find object",
) in to_text(response):
return {}
if not (code >= 200 and code < 300):
self.module.fail_json(
msg="Splunk httpapi returned error {0} with message {1}".format(
- code, response
+ code,
+ response,
),
)
@@ -181,7 +183,7 @@ class SplunkRequest(object):
except ValueError as e:
try:
self.module.fail_json(
- msg="certificate not found: {0}".format(e)
+ msg="certificate not found: {0}".format(e),
)
except AttributeError:
pass
@@ -211,9 +213,7 @@ class SplunkRequest(object):
if self.legacy and not config:
config = self.module.params
for param in config:
- if (config[param]) is not None and (
- param not in self.not_rest_data_keys
- ):
+ if (config[param]) is not None and (param not in self.not_rest_data_keys):
if param in self.keymap:
splunk_data[self.keymap[param]] = config[param]
else:
@@ -223,7 +223,7 @@ class SplunkRequest(object):
except TypeError as e:
self.module.fail_json(
- msg="invalid data type provided: {0}".format(e)
+ msg="invalid data type provided: {0}".format(e),
)
def get_urlencoded_data(self, config):
@@ -252,5 +252,6 @@ class SplunkRequest(object):
if data is not None and self.override:
data = self.get_urlencoded_data(data)
return self.post(
- "/{0}?output_mode=json".format(rest_path), payload=data
+ "/{0}?output_mode=json".format(rest_path),
+ payload=data,
)
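Illustrative aside, not part of the diff: the map_params_to_obj hunk above keeps a value when it is truthy, exactly 0, or exactly False, so that explicit falsy settings survive while None and empty values are dropped. A simplified, dependency-free sketch of that logic (plain dict.items() in place of six.iteritems, and hypothetical keys in the example):

def map_params_to_obj(module_params, key_transform):
    # Keep truthy values, 0, and False; drop None/empty values. Matching keys
    # are popped from module_params, as in the hunk above.
    obj = {}
    for src_key, dst_key in key_transform.items():
        value = module_params.get(src_key)
        if src_key in module_params and (value or value == 0 or value is False):
            obj[dst_key] = module_params.pop(src_key)
    return obj

# False and 0 survive, None is dropped.
print(map_params_to_obj(
    {"disabled": False, "priority": 0, "description": None},
    {"disabled": "disabled", "priority": "alert.priority", "description": "description"},
))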
diff --git a/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py b/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py
index 29099424e..0947c80fc 100644
--- a/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py
+++ b/ansible_collections/splunk/es/plugins/modules/adaptive_response_notable_event.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -49,7 +50,7 @@ options:
description:
- Splunk Security Domain
type: str
- required: False
+ required: false
choices:
- "access"
- "endpoint"
@@ -62,7 +63,7 @@ options:
description:
- Severity rating
type: str
- required: False
+ required: false
choices:
- "informational"
- "low"
@@ -75,12 +76,12 @@ options:
description:
- Default owner of the notable event, if unset it will default to Splunk System Defaults
type: str
- required: False
+ required: false
default_status:
description:
- Default status of the notable event, if unset it will default to Splunk System Defaults
type: str
- required: False
+ required: false
choices:
- "unassigned"
- "new"
@@ -92,19 +93,19 @@ options:
description:
- Name for drill down search, Supports variable substitution with fields from the matching event.
type: str
- required: False
+ required: false
drill_down_search:
description:
- Drill down search, Supports variable substitution with fields from the matching event.
type: str
- required: False
+ required: false
drill_down_earliest_offset:
description:
- Set the amount of time before the triggering event to search for related
events. For example, 2h. Use \"$info_min_time$\" to set the drill-down time
to match the earliest time of the search
type: str
- required: False
+ required: false
default: \"$info_min_time$\"
drill_down_latest_offset:
description:
@@ -112,20 +113,21 @@ options:
events. For example, 1m. Use \"$info_max_time$\" to set the drill-down
time to match the latest time of the search
type: str
- required: False
+ required: false
default: \"$info_max_time$\"
investigation_profiles:
description:
- Investigation profile to assiciate the notable event with.
type: str
- required: False
+ required: false
next_steps:
description:
- List of adaptive responses that should be run next
- Describe next steps and response actions that an analyst could take to address this threat.
type: list
elements: str
- required: False
+ required: false
+ default: []
recommended_actions:
description:
- List of adaptive responses that are recommended to be run next
@@ -134,7 +136,8 @@ options:
making it easier to find them among the longer list of available actions.
type: list
elements: str
- required: False
+ required: false
+ default: []
asset_extraction:
description:
- list of assets to extract, select any one or many of the available choices
@@ -151,7 +154,7 @@ options:
- dest
- dvc
- orig_host
- required: False
+ required: false
identity_extraction:
description:
- list of identity fields to extract, select any one or many of the available choices
@@ -164,11 +167,10 @@ options:
default:
- user
- src_user
- required: False
-
+ required: false
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
-# FIXME - adaptive response action association is probaby going to need to be a separate module we stitch together in a role
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
EXAMPLES = """
- name: Example of using splunk.es.adaptive_response_notable_event module
@@ -187,19 +189,15 @@ EXAMPLES = """
import json
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
-from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.six.moves.urllib.parse import quote_plus, urlencode
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(
name=dict(required=True, type="str"),
correlation_search_name=dict(required=True, type="str"),
@@ -244,17 +242,22 @@ def main():
drill_down_name=dict(required=False, type="str"),
drill_down_search=dict(required=False, type="str"),
drill_down_earliest_offset=dict(
- required=False, type="str", default="$info_min_time$"
+ required=False,
+ type="str",
+ default="$info_min_time$",
),
drill_down_latest_offset=dict(
- required=False, type="str", default="$info_max_time$"
+ required=False,
+ type="str",
+ default="$info_max_time$",
),
investigation_profiles=dict(required=False, type="str"),
- next_steps=dict(
- required=False, type="list", elements="str", default=[]
- ),
+ next_steps=dict(required=False, type="list", elements="str", default=[]),
recommended_actions=dict(
- required=False, type="list", elements="str", default=[]
+ required=False,
+ type="list",
+ elements="str",
+ default=[],
),
asset_extraction=dict(
required=False,
@@ -283,8 +286,8 @@ def main():
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["correlation_search_name"])
- )
+ quote_plus(module.params["correlation_search_name"]),
+ ),
)
# Have to custom craft the data here because they overload the saved searches
@@ -297,9 +300,7 @@ def main():
# request_post_data['action.notable.param.extract_identities'] = [\"src_user\",\"user\"]
if module.params["next_steps"]:
if len(module.params["next_steps"]) == 1:
- next_steps = "[[action|{0}]]".format(
- module.params["next_steps"][0]
- )
+ next_steps = "[[action|{0}]]".format(module.params["next_steps"][0])
else:
next_steps = ""
for next_step in module.params["next_steps"]:
@@ -312,66 +313,48 @@ def main():
# but I don't know what it is/means because there's no docs on it
next_steps_dict = {"version": 1, "data": next_steps}
request_post_data["action.notable.param.next_steps"] = json.dumps(
- next_steps_dict
+ next_steps_dict,
)
if module.params["recommended_actions"]:
if len(module.params["recommended_actions"]) == 1:
- request_post_data[
- "action.notable.param.recommended_actions"
- ] = module.params["recommended_actions"][0]
+ request_post_data["action.notable.param.recommended_actions"] = module.params[
+ "recommended_actions"
+ ][0]
else:
- request_post_data[
- "action.notable.param.recommended_actions"
- ] = ",".join(module.params["recommended_actions"])
+ request_post_data["action.notable.param.recommended_actions"] = ",".join(
+ module.params["recommended_actions"],
+ )
- request_post_data["action.notable.param.rule_description"] = module.params[
- "description"
- ]
- request_post_data["action.notable.param.rule_title"] = module.params[
- "name"
- ]
- request_post_data["action.notable.param.security_domain"] = module.params[
- "security_domain"
- ]
- request_post_data["action.notable.param.severity"] = module.params[
- "severity"
+ request_post_data["action.notable.param.rule_description"] = module.params["description"]
+ request_post_data["action.notable.param.rule_title"] = module.params["name"]
+ request_post_data["action.notable.param.security_domain"] = module.params["security_domain"]
+ request_post_data["action.notable.param.severity"] = module.params["severity"]
+ request_post_data["action.notable.param.asset_extraction"] = module.params["asset_extraction"]
+ request_post_data["action.notable.param.identity_extraction"] = module.params[
+ "identity_extraction"
]
- request_post_data["action.notable.param.asset_extraction"] = module.params[
- "asset_extraction"
- ]
- request_post_data[
- "action.notable.param.identity_extraction"
- ] = module.params["identity_extraction"]
# NOTE: this field appears to be hard coded when you create this via the splunk web UI
# but I don't know what it is/means because there's no docs on it
request_post_data["action.notable.param.verbose"] = "0"
if module.params["default_owner"]:
- request_post_data[
- "action.notable.param.default_owner"
- ] = module.params["default_owner"]
+ request_post_data["action.notable.param.default_owner"] = module.params["default_owner"]
if module.params["default_status"]:
- request_post_data[
- "action.notable.param.default_status"
- ] = module.params["default_status"]
+ request_post_data["action.notable.param.default_status"] = module.params["default_status"]
request_post_data = utils.remove_empties(request_post_data)
if query_dict:
- request_post_data["search"] = query_dict["entry"][0]["content"][
- "search"
- ]
+ request_post_data["search"] = query_dict["entry"][0]["content"]["search"]
if "actions" in query_dict["entry"][0]["content"]:
if query_dict["entry"][0]["content"]["actions"] == "notable":
pass
elif (
- len(query_dict["entry"][0]["content"]["actions"].split(","))
- > 0
- and "notable"
- not in query_dict["entry"][0]["content"]["actions"]
+ len(query_dict["entry"][0]["content"]["actions"].split(",")) > 0
+ and "notable" not in query_dict["entry"][0]["content"]["actions"]
):
request_post_data["actions"] = (
query_dict["entry"][0]["content"]["actions"] + ", notable"
@@ -389,12 +372,14 @@ def main():
for arg in request_post_data:
if arg in query_dict["entry"][0]["content"]:
if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
- request_post_data[arg]
+ request_post_data[arg],
):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -405,15 +390,13 @@ def main():
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["correlation_search_name"])
+ quote_plus(module.params["correlation_search_name"]),
),
data=urlencode(request_post_data),
)
module.exit_json(
changed=True,
- msg="{0} updated.".format(
- module.params["correlation_search_name"]
- ),
+ msg="{0} updated.".format(module.params["correlation_search_name"]),
splunk_data=splunk_data,
)
@@ -430,7 +413,9 @@ def main():
del query_dict["entry"][0]["content"][arg]
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -441,21 +426,17 @@ def main():
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["correlation_search_name"])
+ quote_plus(module.params["correlation_search_name"]),
),
data=urlencode(request_post_data),
)
module.exit_json(
changed=True,
- msg="{0} updated.".format(
- module.params["correlation_search_name"]
- ),
+ msg="{0} updated.".format(module.params["correlation_search_name"]),
splunk_data=splunk_data,
)
- module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
- )
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
if __name__ == "__main__":
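Illustrative aside, not part of the diff: the module hunks above decide whether an update is needed by comparing each field of the desired payload against the existing saved-search content as text. A minimal sketch of that idempotency check, using str() in place of Ansible's to_text and hypothetical field values:

def needs_update(existing_content, request_post_data):
    # Any text-level mismatch between a desired field and the current
    # saved-search content means an update is required, as in the hunk above.
    for arg in request_post_data:
        if arg in existing_content:
            if str(existing_content[arg]) != str(request_post_data[arg]):
                return True
    return False

# Only the severity differs here, so an update is needed.
print(needs_update(
    {"action.notable.param.severity": "low", "disabled": "0"},
    {"action.notable.param.severity": "high", "disabled": "0"},
))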
diff --git a/ansible_collections/splunk/es/plugins/modules/correlation_search.py b/ansible_collections/splunk/es/plugins/modules/correlation_search.py
index 9c865507b..1664c8c8b 100644
--- a/ansible_collections/splunk/es/plugins/modules/correlation_search.py
+++ b/ansible_collections/splunk/es/plugins/modules/correlation_search.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -25,29 +26,29 @@ options:
name:
description:
- Name of coorelation search
- required: True
+ required: true
type: str
description:
description:
- Description of the coorelation search, this will populate the description field for the web console
- required: True
+ required: true
type: str
state:
description:
- Add, remove, enable, or disiable a correlation search.
- required: True
+ required: true
choices: [ "present", "absent", "enabled", "disabled" ]
type: str
search:
description:
- SPL search string
type: str
- required: True
+ required: true
app:
description:
- Splunk app to associate the correlation seach with
type: str
- required: False
+ required: false
default: "SplunkEnterpriseSecuritySuite"
ui_dispatch_context:
description:
@@ -55,18 +56,18 @@ options:
event or links in an email adaptive response action. If None, uses the
Application Context.
type: str
- required: False
+ required: false
time_earliest:
description:
- Earliest time using relative time modifiers.
type: str
- required: False
+ required: false
default: "-24h"
time_latest:
description:
- Latest time using relative time modifiers.
type: str
- required: False
+ required: false
default: "now"
cron_schedule:
description:
@@ -74,7 +75,7 @@ options:
- For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM).
- Real-time searches use a default schedule of C('*/5 * * * *').
type: str
- required: False
+ required: false
default: "*/5 * * * *"
scheduling:
description:
@@ -83,7 +84,7 @@ options:
Learn more:
https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling
type: str
- required: False
+ required: false
default: "real-time"
choices:
- "real-time"
@@ -94,7 +95,7 @@ options:
to improve efficiency when there are many concurrently scheduled reports.
The "auto" setting automatically determines the best window width for the report.
type: str
- required: False
+ required: false
default: "0"
schedule_priority:
description:
@@ -102,7 +103,7 @@ options:
it above other searches of the same scheduling mode, or "Highest" to
prioritize it above other searches regardless of mode. Use with discretion.
type: str
- required: False
+ required: false
default: "Default"
choices:
- "Default"
@@ -114,7 +115,7 @@ options:
it above other searches of the same scheduling mode, or "Highest" to
prioritize it above other searches regardless of mode. Use with discretion.
type: str
- required: False
+ required: false
default: "number of events"
choices:
- "number of events"
@@ -125,7 +126,7 @@ options:
description:
- Conditional to pass to C(trigger_alert_when)
type: str
- required: False
+ required: false
default: "greater than"
choices:
- "greater than"
@@ -138,24 +139,24 @@ options:
description:
- Value to pass to C(trigger_alert_when)
type: str
- required: False
+ required: false
default: "10"
throttle_window_duration:
description:
- "How much time to ignore other events that match the field values specified in Fields to group by."
type: str
- required: False
+ required: false
throttle_fields_to_group_by:
description:
- "Type the fields to consider for matching events for throttling."
type: str
- required: False
+ required: false
suppress_alerts:
description:
- "To suppress alerts from this correlation search or not"
type: bool
- required: False
- default: False
+ required: false
+ default: false
notes:
- >
The following options are not yet supported:
@@ -174,30 +175,22 @@ EXAMPLES = """
state: "present"
"""
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
-
-from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import HTTPError
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible.module_utils.six.moves.urllib.parse import quote_plus, urlencode
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
def main():
argspec = dict(
name=dict(required=True, type="str"),
description=dict(required=True, type="str"),
- state=dict(
- choices=["present", "absent", "enabled", "disabled"], required=True
- ),
+ state=dict(choices=["present", "absent", "enabled", "disabled"], required=True),
search=dict(required=True, type="str"),
- app=dict(
- type="str", required=False, default="SplunkEnterpriseSecuritySuite"
- ),
+ app=dict(type="str", required=False, default="SplunkEnterpriseSecuritySuite"),
ui_dispatch_context=dict(type="str", required=False),
time_earliest=dict(type="str", required=False, default="-24h"),
time_latest=dict(type="str", required=False, default="now"),
@@ -239,9 +232,7 @@ def main():
"rises by",
],
),
- trigger_alert_when_value=dict(
- type="str", required=False, default="10"
- ),
+ trigger_alert_when_value=dict(type="str", required=False, default="10"),
throttle_window_duration=dict(type="str", required=False),
throttle_fields_to_group_by=dict(type="str", required=False),
suppress_alerts=dict(type="bool", required=False, default=False),
@@ -264,8 +255,8 @@ def main():
try:
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
except HTTPError as e:
# the data monitor doesn't exist
@@ -283,12 +274,8 @@ def main():
request_post_data["search"] = module.params["search"]
request_post_data["request.ui_dispatch_app"] = module.params["app"]
if module.params["ui_dispatch_context"]:
- request_post_data["request.ui_dispatch_context"] = module.params[
- "ui_dispatch_context"
- ]
- request_post_data["dispatch.earliest_time"] = module.params[
- "time_earliest"
- ]
+ request_post_data["request.ui_dispatch_context"] = module.params["ui_dispatch_context"]
+ request_post_data["dispatch.earliest_time"] = module.params["time_earliest"]
request_post_data["dispatch.latest_time"] = module.params["time_latest"]
request_post_data["cron_schedule"] = module.params["cron_schedule"]
if module.params["scheduling"] == "real-time":
@@ -296,16 +283,10 @@ def main():
else:
request_post_data["realtime_schedule"] = False
request_post_data["schedule_window"] = module.params["schedule_window"]
- request_post_data["schedule_priority"] = module.params[
- "schedule_priority"
- ].lower()
+ request_post_data["schedule_priority"] = module.params["schedule_priority"].lower()
request_post_data["alert_type"] = module.params["trigger_alert_when"]
- request_post_data["alert_comparator"] = module.params[
- "trigger_alert_when_condition"
- ]
- request_post_data["alert_threshold"] = module.params[
- "trigger_alert_when_value"
- ]
+ request_post_data["alert_comparator"] = module.params["trigger_alert_when_condition"]
+ request_post_data["alert_threshold"] = module.params["trigger_alert_when_value"]
request_post_data["alert.suppress"] = module.params["suppress_alerts"]
request_post_data["disabled"] = module_disabled_state
@@ -316,13 +297,15 @@ def main():
needs_change = False
for arg in request_post_data:
if arg in query_dict["entry"][0]["content"]:
- if to_text(
- query_dict["entry"][0]["content"][arg]
- ) != to_text(request_post_data[arg]):
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_post_data[arg],
+ ):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -337,12 +320,14 @@ def main():
] # If this is present, splunk assumes we're trying to create a new one wit the same name
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["name"])
+ quote_plus(module.params["name"]),
),
data=urlencode(request_post_data),
)
module.exit_json(
- changed=True, msg="{0} updated.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} updated.",
+ splunk_data=splunk_data,
)
else:
# Create it
@@ -350,16 +335,12 @@ def main():
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches",
data=urlencode(request_post_data),
)
- module.exit_json(
- changed=True, msg="{0} created.", splunk_data=splunk_data
- )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
elif module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
- "services/saved/searches/{0}".format(
- quote_plus(module.params["name"])
- )
+ "services/saved/searches/{0}".format(quote_plus(module.params["name"])),
)
module.exit_json(
changed=True,
@@ -367,9 +348,7 @@ def main():
splunk_data=splunk_data,
)
- module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
- )
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
if __name__ == "__main__":
diff --git a/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py b/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py
index 0ab756989..ecb36ce66 100644
--- a/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py
+++ b/ansible_collections/splunk/es/plugins/modules/correlation_search_info.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -40,15 +41,13 @@ EXAMPLES = """
"""
from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible.module_utils.six.moves.urllib.error import HTTPError
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible.module_utils.six.moves.urllib.parse import quote_plus
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(name=dict(required=False, type="str"))
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
@@ -62,15 +61,15 @@ def main():
try:
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
except HTTPError as e:
# the correlation search doesn't exist
query_dict = {}
else:
query_dict = splunk_request.get_by_path(
- "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches",
)
module.exit_json(changed=False, splunk_correlation_search_info=query_dict)
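
The hunks above only move trailing commas, but they touch the module's one piece of real logic: when a name is supplied it is URL-encoded with quote_plus() and appended to the saved-searches endpoint, otherwise the bare endpoint is queried to list every correlation search. A small sketch of that path construction (the search name below is hypothetical):

    # Sketch of the lookup path built above.
    from urllib.parse import quote_plus

    base = "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
    name = "Suspicious Login Detected"  # hypothetical correlation search name

    path = "{0}/{1}".format(base, quote_plus(name)) if name else base
    print(path)
    # servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/Suspicious+Login+Detected
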
diff --git a/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py b/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py
index 080d23d3b..b0108d74a 100644
--- a/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py
+++ b/ansible_collections/splunk/es/plugins/modules/data_input_monitor.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -25,12 +26,12 @@ options:
name:
description:
- The file or directory path to monitor on the system.
- required: True
+ required: true
type: str
state:
description:
- Add or remove a data source.
- required: True
+ required: true
choices:
- "present"
- "absent"
@@ -38,41 +39,41 @@ options:
blacklist:
description:
- Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.
- required: False
+ required: false
type: str
check_index:
description:
- - If set to C(True), the index value is checked to ensure that it is the name of a valid index.
- required: False
+ - If set to C(true), the index value is checked to ensure that it is the name of a valid index.
+ required: false
type: bool
- default: False
+ default: false
check_path:
description:
- - If set to C(True), the name value is checked to ensure that it exists.
- required: False
+ - If set to C(true), the name value is checked to ensure that it exists.
+ required: false
type: bool
crc_salt:
description:
- A string that modifies the file tracking identity for files in this input.
The magic value <SOURCE> invokes special behavior (see admin documentation).
- required: False
+ required: false
type: str
disabled:
description:
- Indicates if input monitoring is disabled.
- required: False
- default: False
+ required: false
+ default: false
type: bool
followTail:
description:
- - If set to C(True), files that are seen for the first time is read from the end.
- required: False
+ - If set to C(true), files that are seen for the first time are read from the end.
+ required: false
type: bool
- default: False
+ default: false
host:
description:
- The value to populate in the host field for events from this data input.
- required: False
+ required: false
type: str
host_regex:
description:
@@ -80,40 +81,40 @@ options:
matches this regular expression, the captured value is used to populate
the host field for events from this data input. The regular expression
must have one capture group.
- required: False
+ required: false
type: str
host_segment:
description:
- Use the specified slash-separated segment of the filepath as the host field value.
- required: False
+ required: false
type: int
ignore_older_than:
description:
- Specify a time value. If the modification time of a file being monitored
falls outside of this rolling time window, the file is no longer being monitored.
- required: False
+ required: false
type: str
index:
description:
- Which index events from this input should be stored in. Defaults to default.
- required: False
+ required: false
type: str
recursive:
description:
- - Setting this to False prevents monitoring of any subdirectories encountered within this data input.
- required: False
+ - Setting this to false prevents monitoring of any subdirectories encountered within this data input.
+ required: false
type: bool
- default: False
+ default: false
rename_source:
description:
- The value to populate in the source field for events from this data input.
The same source should not be used for multiple data inputs.
- required: False
+ required: false
type: str
sourcetype:
description:
- The value to populate in the sourcetype field for incoming events.
- required: False
+ required: false
type: str
time_before_close:
description:
@@ -121,12 +122,12 @@ options:
file is kept open for a minimum of the number of seconds specified in
this value. After this period has elapsed, the file is checked again for
more data.
- required: False
+ required: false
type: int
whitelist:
description:
- Specify a regular expression for a file path. Only file paths that match this regular expression are indexed.
- required: False
+ required: false
type: str
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
@@ -136,22 +137,18 @@ EXAMPLES = """
splunk.es.data_input_monitor:
name: "/var/log/example.log"
state: "present"
- recursive: True
+ recursive: true
"""
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
+from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import quote_plus
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(
name=dict(required=True, type="str"),
state=dict(choices=["present", "absent"], required=True),
@@ -197,8 +194,8 @@ def main():
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
query_dict = utils.remove_empties(query_dict)
@@ -207,13 +204,15 @@ def main():
needs_change = False
for arg in request_data:
if arg in query_dict["entry"][0]["content"]:
- if to_text(
- query_dict["entry"][0]["content"][arg]
- ) != to_text(request_data[arg]):
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_data[arg],
+ ):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -224,11 +223,13 @@ def main():
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
module.exit_json(
- changed=True, msg="{0} updated.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} updated.",
+ splunk_data=splunk_data,
)
else:
# Create it
@@ -238,16 +239,14 @@ def main():
"servicesNS/nobody/search/data/inputs/monitor",
data=_data,
)
- module.exit_json(
- changed=True, msg="{0} created.", splunk_data=splunk_data
- )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
if module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
module.exit_json(
changed=True,
@@ -255,9 +254,7 @@ def main():
splunk_data=splunk_data,
)
- module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
- )
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
if __name__ == "__main__":
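
Before the monitor input above is created or updated, the request data is passed through utils.remove_empties() so that options the task never set do not reach the Splunk REST API. A dependency-free approximation of that call, with hypothetical sample parameters (the real netcommon helper also descends into nested structures):

    # Rough stand-in for utils.remove_empties() used in the hunks above.
    def remove_empties(data):
        # drop keys whose value is None or an empty string/list/dict
        return {k: v for k, v in data.items() if v not in (None, "", [], {})}

    params = {"name": "/var/log/example.log", "recursive": True, "blacklist": None, "index": None}
    print(remove_empties(params))  # {'name': '/var/log/example.log', 'recursive': True}
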
diff --git a/ansible_collections/splunk/es/plugins/modules/data_input_network.py b/ansible_collections/splunk/es/plugins/modules/data_input_network.py
index 5771eb9cc..14905563a 100644
--- a/ansible_collections/splunk/es/plugins/modules/data_input_network.py
+++ b/ansible_collections/splunk/es/plugins/modules/data_input_network.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -25,7 +26,7 @@ options:
protocol:
description:
- Choose between tcp and udp
- required: True
+ required: true
choices:
- 'tcp'
- 'udp'
@@ -37,7 +38,7 @@ options:
- C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data.
- C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
default: "ip"
- required: False
+ required: false
type: str
choices:
- "ip"
@@ -51,7 +52,7 @@ options:
- "absent"
- "enabled"
- "disable"
- required: False
+ required: false
default: "present"
type: str
datatype:
@@ -62,12 +63,12 @@ options:
- "cooked"
- "raw"
default: "raw"
- required: False
+ required: false
type: str
host:
description:
- Host from which the indexer gets data.
- required: False
+ required: false
type: str
index:
description:
@@ -76,7 +77,7 @@ options:
name:
description:
- The input port which receives raw data.
- required: True
+ required: true
type: str
queue:
description:
@@ -89,7 +90,7 @@ options:
- "parsingQueue"
- "indexQueue"
type: str
- required: False
+ required: false
default: "parsingQueue"
rawTcpDoneTimeout:
description:
@@ -98,16 +99,16 @@ options:
number of seconds, it adds a Done-key. This implies the last event is completely received.
default: 10
type: int
- required: False
+ required: false
restrictToHost:
description:
- Allows for restricting this input to only accept data from the host specified here.
- required: False
+ required: false
type: str
ssl:
description:
- Enable or disable ssl for the data stream
- required: False
+ required: false
type: bool
source:
description:
@@ -126,7 +127,7 @@ options:
description:
- Set the source type for events from this input.
- '"sourcetype=" is automatically prepended to <string>.'
- - Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).
+ - Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).
type: str
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
@@ -140,16 +141,14 @@ EXAMPLES = """
"""
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
+from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import quote_plus
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(
state=dict(
required=False,
@@ -178,9 +177,7 @@ def main():
ssl=dict(required=False, type="bool", default=None),
source=dict(required=False, type="str", default=None),
sourcetype=dict(required=False, type="str", default=None),
- datatype=dict(
- required=False, choices=["cooked", "raw"], default="raw"
- ),
+ datatype=dict(required=False, choices=["cooked", "raw"], default="raw"),
)
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
@@ -198,7 +195,7 @@ def main():
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
quote_plus(module.params["name"]),
- )
+ ),
)
if module.params["state"] in ["present", "enabled", "disabled"]:
@@ -211,13 +208,15 @@ def main():
needs_change = False
for arg in request_data:
if arg in query_dict["entry"][0]["content"]:
- if to_text(
- query_dict["entry"][0]["content"][arg]
- ) != to_text(request_data[arg]):
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_data[arg],
+ ):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -236,11 +235,15 @@ def main():
)
if module.params["state"] in ["present", "enabled"]:
module.exit_json(
- changed=True, msg="{0} updated.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} updated.",
+ splunk_data=splunk_data,
)
else:
module.exit_json(
- changed=True, msg="{0} disabled.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} disabled.",
+ splunk_data=splunk_data,
)
else:
# Create it
@@ -251,9 +254,7 @@ def main():
),
data=_data,
)
- module.exit_json(
- changed=True, msg="{0} created.", splunk_data=splunk_data
- )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
elif module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
@@ -261,7 +262,7 @@ def main():
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
quote_plus(module.params["name"]),
- )
+ ),
)
module.exit_json(
changed=True,
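
For the network input above, the REST object is addressed by three URL-quoted path segments: the protocol, the datatype and the port used as the input name. A sketch of that assembly; the base prefix below is an assumption for illustration only, since the format string itself sits outside the visible hunks:

    # Sketch of the three-segment path quoted above (BASE is assumed, not taken from the diff).
    from urllib.parse import quote_plus

    BASE = "servicesNS/nobody/search/data/inputs"  # hypothetical prefix
    protocol, datatype, name = "tcp", "raw", "8100"

    path = "/".join([BASE, quote_plus(protocol), quote_plus(datatype), quote_plus(name)])
    print(path)  # servicesNS/nobody/search/data/inputs/tcp/raw/8100
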
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py
index 29099424e..0947c80fc 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_event.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -49,7 +50,7 @@ options:
description:
- Splunk Security Domain
type: str
- required: False
+ required: false
choices:
- "access"
- "endpoint"
@@ -62,7 +63,7 @@ options:
description:
- Severity rating
type: str
- required: False
+ required: false
choices:
- "informational"
- "low"
@@ -75,12 +76,12 @@ options:
description:
- Default owner of the notable event; if unset it will default to Splunk System Defaults
type: str
- required: False
+ required: false
default_status:
description:
- Default status of the notable event; if unset it will default to Splunk System Defaults
type: str
- required: False
+ required: false
choices:
- "unassigned"
- "new"
@@ -92,19 +93,19 @@ options:
description:
- Name for drill down search; supports variable substitution with fields from the matching event.
type: str
- required: False
+ required: false
drill_down_search:
description:
- Drill down search; supports variable substitution with fields from the matching event.
type: str
- required: False
+ required: false
drill_down_earliest_offset:
description:
- Set the amount of time before the triggering event to search for related
events. For example, 2h. Use \"$info_min_time$\" to set the drill-down time
to match the earliest time of the search
type: str
- required: False
+ required: false
default: \"$info_min_time$\"
drill_down_latest_offset:
description:
@@ -112,20 +113,21 @@ options:
events. For example, 1m. Use \"$info_max_time$\" to set the drill-down
time to match the latest time of the search
type: str
- required: False
+ required: false
default: \"$info_max_time$\"
investigation_profiles:
description:
- Investigation profile to associate the notable event with.
type: str
- required: False
+ required: false
next_steps:
description:
- List of adaptive responses that should be run next
- Describe next steps and response actions that an analyst could take to address this threat.
type: list
elements: str
- required: False
+ required: false
+ default: []
recommended_actions:
description:
- List of adaptive responses that are recommended to be run next
@@ -134,7 +136,8 @@ options:
making it easier to find them among the longer list of available actions.
type: list
elements: str
- required: False
+ required: false
+ default: []
asset_extraction:
description:
- list of assets to extract, select any one or many of the available choices
@@ -151,7 +154,7 @@ options:
- dest
- dvc
- orig_host
- required: False
+ required: false
identity_extraction:
description:
- list of identity fields to extract, select any one or many of the available choices
@@ -164,11 +167,10 @@ options:
default:
- user
- src_user
- required: False
-
+ required: false
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
-# FIXME - adaptive response action association is probaby going to need to be a separate module we stitch together in a role
+# FIXME - adaptive response action association is probably going to need to be a separate module we stitch together in a role
EXAMPLES = """
- name: Example of using splunk.es.adaptive_response_notable_event module
@@ -187,19 +189,15 @@ EXAMPLES = """
import json
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
-from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.six.moves.urllib.parse import quote_plus, urlencode
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(
name=dict(required=True, type="str"),
correlation_search_name=dict(required=True, type="str"),
@@ -244,17 +242,22 @@ def main():
drill_down_name=dict(required=False, type="str"),
drill_down_search=dict(required=False, type="str"),
drill_down_earliest_offset=dict(
- required=False, type="str", default="$info_min_time$"
+ required=False,
+ type="str",
+ default="$info_min_time$",
),
drill_down_latest_offset=dict(
- required=False, type="str", default="$info_max_time$"
+ required=False,
+ type="str",
+ default="$info_max_time$",
),
investigation_profiles=dict(required=False, type="str"),
- next_steps=dict(
- required=False, type="list", elements="str", default=[]
- ),
+ next_steps=dict(required=False, type="list", elements="str", default=[]),
recommended_actions=dict(
- required=False, type="list", elements="str", default=[]
+ required=False,
+ type="list",
+ elements="str",
+ default=[],
),
asset_extraction=dict(
required=False,
@@ -283,8 +286,8 @@ def main():
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["correlation_search_name"])
- )
+ quote_plus(module.params["correlation_search_name"]),
+ ),
)
# Have to custom craft the data here because they overload the saved searches
@@ -297,9 +300,7 @@ def main():
# request_post_data['action.notable.param.extract_identities'] = [\"src_user\",\"user\"]
if module.params["next_steps"]:
if len(module.params["next_steps"]) == 1:
- next_steps = "[[action|{0}]]".format(
- module.params["next_steps"][0]
- )
+ next_steps = "[[action|{0}]]".format(module.params["next_steps"][0])
else:
next_steps = ""
for next_step in module.params["next_steps"]:
@@ -312,66 +313,48 @@ def main():
# but I don't know what it is/means because there's no docs on it
next_steps_dict = {"version": 1, "data": next_steps}
request_post_data["action.notable.param.next_steps"] = json.dumps(
- next_steps_dict
+ next_steps_dict,
)
if module.params["recommended_actions"]:
if len(module.params["recommended_actions"]) == 1:
- request_post_data[
- "action.notable.param.recommended_actions"
- ] = module.params["recommended_actions"][0]
+ request_post_data["action.notable.param.recommended_actions"] = module.params[
+ "recommended_actions"
+ ][0]
else:
- request_post_data[
- "action.notable.param.recommended_actions"
- ] = ",".join(module.params["recommended_actions"])
+ request_post_data["action.notable.param.recommended_actions"] = ",".join(
+ module.params["recommended_actions"],
+ )
- request_post_data["action.notable.param.rule_description"] = module.params[
- "description"
- ]
- request_post_data["action.notable.param.rule_title"] = module.params[
- "name"
- ]
- request_post_data["action.notable.param.security_domain"] = module.params[
- "security_domain"
- ]
- request_post_data["action.notable.param.severity"] = module.params[
- "severity"
+ request_post_data["action.notable.param.rule_description"] = module.params["description"]
+ request_post_data["action.notable.param.rule_title"] = module.params["name"]
+ request_post_data["action.notable.param.security_domain"] = module.params["security_domain"]
+ request_post_data["action.notable.param.severity"] = module.params["severity"]
+ request_post_data["action.notable.param.asset_extraction"] = module.params["asset_extraction"]
+ request_post_data["action.notable.param.identity_extraction"] = module.params[
+ "identity_extraction"
]
- request_post_data["action.notable.param.asset_extraction"] = module.params[
- "asset_extraction"
- ]
- request_post_data[
- "action.notable.param.identity_extraction"
- ] = module.params["identity_extraction"]
# NOTE: this field appears to be hard coded when you create this via the splunk web UI
# but I don't know what it is/means because there's no docs on it
request_post_data["action.notable.param.verbose"] = "0"
if module.params["default_owner"]:
- request_post_data[
- "action.notable.param.default_owner"
- ] = module.params["default_owner"]
+ request_post_data["action.notable.param.default_owner"] = module.params["default_owner"]
if module.params["default_status"]:
- request_post_data[
- "action.notable.param.default_status"
- ] = module.params["default_status"]
+ request_post_data["action.notable.param.default_status"] = module.params["default_status"]
request_post_data = utils.remove_empties(request_post_data)
if query_dict:
- request_post_data["search"] = query_dict["entry"][0]["content"][
- "search"
- ]
+ request_post_data["search"] = query_dict["entry"][0]["content"]["search"]
if "actions" in query_dict["entry"][0]["content"]:
if query_dict["entry"][0]["content"]["actions"] == "notable":
pass
elif (
- len(query_dict["entry"][0]["content"]["actions"].split(","))
- > 0
- and "notable"
- not in query_dict["entry"][0]["content"]["actions"]
+ len(query_dict["entry"][0]["content"]["actions"].split(",")) > 0
+ and "notable" not in query_dict["entry"][0]["content"]["actions"]
):
request_post_data["actions"] = (
query_dict["entry"][0]["content"]["actions"] + ", notable"
@@ -389,12 +372,14 @@ def main():
for arg in request_post_data:
if arg in query_dict["entry"][0]["content"]:
if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
- request_post_data[arg]
+ request_post_data[arg],
):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -405,15 +390,13 @@ def main():
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["correlation_search_name"])
+ quote_plus(module.params["correlation_search_name"]),
),
data=urlencode(request_post_data),
)
module.exit_json(
changed=True,
- msg="{0} updated.".format(
- module.params["correlation_search_name"]
- ),
+ msg="{0} updated.".format(module.params["correlation_search_name"]),
splunk_data=splunk_data,
)
@@ -430,7 +413,9 @@ def main():
del query_dict["entry"][0]["content"][arg]
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -441,21 +426,17 @@ def main():
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["correlation_search_name"])
+ quote_plus(module.params["correlation_search_name"]),
),
data=urlencode(request_post_data),
)
module.exit_json(
changed=True,
- msg="{0} updated.".format(
- module.params["correlation_search_name"]
- ),
+ msg="{0} updated.".format(module.params["correlation_search_name"]),
splunk_data=splunk_data,
)
- module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
- )
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
if __name__ == "__main__":
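
The most involved logic in the hunks above is the serialisation of next_steps: a single step becomes a "[[action|<step>]]" token and the result is wrapped in a version-1 JSON envelope before being posted as action.notable.param.next_steps. A sketch with hypothetical steps; the join used for multiple steps lies outside the visible hunk, so plain concatenation is assumed here:

    # Sketch of the next_steps payload built above (steps and the multi-step join are assumptions).
    import json

    next_steps_list = ["makestreams", "ping"]  # hypothetical adaptive response steps

    if len(next_steps_list) == 1:
        next_steps = "[[action|{0}]]".format(next_steps_list[0])
    else:
        next_steps = "".join("[[action|{0}]]".format(step) for step in next_steps_list)

    payload = json.dumps({"version": 1, "data": next_steps})
    print(payload)  # {"version": 1, "data": "[[action|makestreams]][[action|ping]]"}
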
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py
index fa680a511..2ee6461ae 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_adaptive_response_notable_events.py
@@ -6,6 +6,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -255,19 +256,19 @@ EXAMPLES = """
drilldown_earliest_offset: $info_min_time$
drilldown_latest_offset: $info_max_time$
extract_artifacts:
- asset:
- - src
- - dest
- identity:
- - src_user
- - user
- - src_user_id
+ asset:
+ - src
+ - dest
+ identity:
+ - src_user
+ - user
+ - src_user_id
next_steps:
- - makestreams
+ - makestreams
name: ansible_test_notable
recommended_actions:
- - email
- - logevent
+ - email
+ - logevent
security_domain: threat
severity: high
state: merged
@@ -334,19 +335,19 @@ EXAMPLES = """
drilldown_earliest_offset: $info_min_time$
drilldown_latest_offset: $info_max_time$
extract_artifacts:
- asset:
- - src
- - dest
- identity:
- - src_user
- - user
- - src_user_id
+ asset:
+ - src
+ - dest
+ identity:
+ - src_user
+ - user
+ - src_user_id
next_steps:
- - makestreams
+ - makestreams
name: ansible_test_notable
recommended_actions:
- - email
- - logevent
+ - email
+ - logevent
security_domain: threat
severity: high
state: replaced
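
The changes above are purely cosmetic re-indentation of the nested lists in the EXAMPLES block. A quick check, assuming PyYAML is available, that the old and new indentation parse to the same data:

    # Both indentation styles shown in the hunks above are equivalent YAML.
    import yaml  # assumes PyYAML is installed

    old_style = """
    extract_artifacts:
      asset:
      - src
      - dest
    """
    new_style = """
    extract_artifacts:
      asset:
        - src
        - dest
    """
    print(yaml.safe_load(old_style) == yaml.safe_load(new_style))  # True: same parsed structure
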
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py
index 9c865507b..1664c8c8b 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -25,29 +26,29 @@ options:
name:
description:
- Name of correlation search
- required: True
+ required: true
type: str
description:
description:
- Description of the correlation search, this will populate the description field for the web console
- required: True
+ required: true
type: str
state:
description:
- Add, remove, enable, or disable a correlation search.
- required: True
+ required: true
choices: [ "present", "absent", "enabled", "disabled" ]
type: str
search:
description:
- SPL search string
type: str
- required: True
+ required: true
app:
description:
- Splunk app to associate the correlation search with
type: str
- required: False
+ required: false
default: "SplunkEnterpriseSecuritySuite"
ui_dispatch_context:
description:
@@ -55,18 +56,18 @@ options:
event or links in an email adaptive response action. If None, uses the
Application Context.
type: str
- required: False
+ required: false
time_earliest:
description:
- Earliest time using relative time modifiers.
type: str
- required: False
+ required: false
default: "-24h"
time_latest:
description:
- Latest time using relative time modifiers.
type: str
- required: False
+ required: false
default: "now"
cron_schedule:
description:
@@ -74,7 +75,7 @@ options:
- For example C('*/5 * * * *') (every 5 minutes) or C('0 21 * * *') (every day at 9 PM).
- Real-time searches use a default schedule of C('*/5 * * * *').
type: str
- required: False
+ required: false
default: "*/5 * * * *"
scheduling:
description:
@@ -83,7 +84,7 @@ options:
Learn more:
https://docs.splunk.com/Documentation/Splunk/7.2.3/Report/Configurethepriorityofscheduledreports#Real-time_scheduling_and_continuous_scheduling
type: str
- required: False
+ required: false
default: "real-time"
choices:
- "real-time"
@@ -94,7 +95,7 @@ options:
to improve efficiency when there are many concurrently scheduled reports.
The "auto" setting automatically determines the best window width for the report.
type: str
- required: False
+ required: false
default: "0"
schedule_priority:
description:
@@ -102,7 +103,7 @@ options:
it above other searches of the same scheduling mode, or "Highest" to
prioritize it above other searches regardless of mode. Use with discretion.
type: str
- required: False
+ required: false
default: "Default"
choices:
- "Default"
@@ -114,7 +115,7 @@ options:
it above other searches of the same scheduling mode, or "Highest" to
prioritize it above other searches regardless of mode. Use with discretion.
type: str
- required: False
+ required: false
default: "number of events"
choices:
- "number of events"
@@ -125,7 +126,7 @@ options:
description:
- Conditional to pass to C(trigger_alert_when)
type: str
- required: False
+ required: false
default: "greater than"
choices:
- "greater than"
@@ -138,24 +139,24 @@ options:
description:
- Value to pass to C(trigger_alert_when)
type: str
- required: False
+ required: false
default: "10"
throttle_window_duration:
description:
- "How much time to ignore other events that match the field values specified in Fields to group by."
type: str
- required: False
+ required: false
throttle_fields_to_group_by:
description:
- "Type the fields to consider for matching events for throttling."
type: str
- required: False
+ required: false
suppress_alerts:
description:
- "To suppress alerts from this correlation search or not"
type: bool
- required: False
- default: False
+ required: false
+ default: false
notes:
- >
The following options are not yet supported:
@@ -174,30 +175,22 @@ EXAMPLES = """
state: "present"
"""
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
-
-from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
+from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import HTTPError
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible.module_utils.six.moves.urllib.parse import quote_plus, urlencode
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
def main():
argspec = dict(
name=dict(required=True, type="str"),
description=dict(required=True, type="str"),
- state=dict(
- choices=["present", "absent", "enabled", "disabled"], required=True
- ),
+ state=dict(choices=["present", "absent", "enabled", "disabled"], required=True),
search=dict(required=True, type="str"),
- app=dict(
- type="str", required=False, default="SplunkEnterpriseSecuritySuite"
- ),
+ app=dict(type="str", required=False, default="SplunkEnterpriseSecuritySuite"),
ui_dispatch_context=dict(type="str", required=False),
time_earliest=dict(type="str", required=False, default="-24h"),
time_latest=dict(type="str", required=False, default="now"),
@@ -239,9 +232,7 @@ def main():
"rises by",
],
),
- trigger_alert_when_value=dict(
- type="str", required=False, default="10"
- ),
+ trigger_alert_when_value=dict(type="str", required=False, default="10"),
throttle_window_duration=dict(type="str", required=False),
throttle_fields_to_group_by=dict(type="str", required=False),
suppress_alerts=dict(type="bool", required=False, default=False),
@@ -264,8 +255,8 @@ def main():
try:
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
except HTTPError as e:
# the correlation search doesn't exist
@@ -283,12 +274,8 @@ def main():
request_post_data["search"] = module.params["search"]
request_post_data["request.ui_dispatch_app"] = module.params["app"]
if module.params["ui_dispatch_context"]:
- request_post_data["request.ui_dispatch_context"] = module.params[
- "ui_dispatch_context"
- ]
- request_post_data["dispatch.earliest_time"] = module.params[
- "time_earliest"
- ]
+ request_post_data["request.ui_dispatch_context"] = module.params["ui_dispatch_context"]
+ request_post_data["dispatch.earliest_time"] = module.params["time_earliest"]
request_post_data["dispatch.latest_time"] = module.params["time_latest"]
request_post_data["cron_schedule"] = module.params["cron_schedule"]
if module.params["scheduling"] == "real-time":
@@ -296,16 +283,10 @@ def main():
else:
request_post_data["realtime_schedule"] = False
request_post_data["schedule_window"] = module.params["schedule_window"]
- request_post_data["schedule_priority"] = module.params[
- "schedule_priority"
- ].lower()
+ request_post_data["schedule_priority"] = module.params["schedule_priority"].lower()
request_post_data["alert_type"] = module.params["trigger_alert_when"]
- request_post_data["alert_comparator"] = module.params[
- "trigger_alert_when_condition"
- ]
- request_post_data["alert_threshold"] = module.params[
- "trigger_alert_when_value"
- ]
+ request_post_data["alert_comparator"] = module.params["trigger_alert_when_condition"]
+ request_post_data["alert_threshold"] = module.params["trigger_alert_when_value"]
request_post_data["alert.suppress"] = module.params["suppress_alerts"]
request_post_data["disabled"] = module_disabled_state
@@ -316,13 +297,15 @@ def main():
needs_change = False
for arg in request_post_data:
if arg in query_dict["entry"][0]["content"]:
- if to_text(
- query_dict["entry"][0]["content"][arg]
- ) != to_text(request_post_data[arg]):
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_post_data[arg],
+ ):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -337,12 +320,14 @@ def main():
] # If this is present, Splunk assumes we're trying to create a new one with the same name
splunk_data = splunk_request.create_update(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["name"])
+ quote_plus(module.params["name"]),
),
data=urlencode(request_post_data),
)
module.exit_json(
- changed=True, msg="{0} updated.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} updated.",
+ splunk_data=splunk_data,
)
else:
# Create it
@@ -350,16 +335,12 @@ def main():
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches",
data=urlencode(request_post_data),
)
- module.exit_json(
- changed=True, msg="{0} created.", splunk_data=splunk_data
- )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
elif module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
- "services/saved/searches/{0}".format(
- quote_plus(module.params["name"])
- )
+ "services/saved/searches/{0}".format(quote_plus(module.params["name"])),
)
module.exit_json(
changed=True,
@@ -367,9 +348,7 @@ def main():
splunk_data=splunk_data,
)
- module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
- )
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
if __name__ == "__main__":
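
Beyond the reformatting, the hunks above show how the user-facing options are mapped onto Splunk saved-search fields before the POST: scheduling selects real-time versus continuous scheduling, schedule_priority is lower-cased, and the trigger_alert_when* trio becomes alert_type, alert_comparator and alert_threshold. A sketch of that mapping with hypothetical values:

    # Sketch of the option-to-field mapping reformatted above (sample values are hypothetical).
    params = {
        "scheduling": "real-time",
        "schedule_priority": "Default",
        "trigger_alert_when": "number of events",
        "trigger_alert_when_condition": "greater than",
        "trigger_alert_when_value": "10",
    }

    post_data = {
        "realtime_schedule": params["scheduling"] == "real-time",
        "schedule_priority": params["schedule_priority"].lower(),
        "alert_type": params["trigger_alert_when"],
        "alert_comparator": params["trigger_alert_when_condition"],
        "alert_threshold": params["trigger_alert_when_value"],
    }
    print(post_data["realtime_schedule"], post_data["schedule_priority"])  # True default
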
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py
index 0ab756989..ecb36ce66 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_search_info.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -40,15 +41,13 @@ EXAMPLES = """
"""
from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.six.moves.urllib.parse import quote_plus
from ansible.module_utils.six.moves.urllib.error import HTTPError
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible.module_utils.six.moves.urllib.parse import quote_plus
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(name=dict(required=False, type="str"))
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
@@ -62,15 +61,15 @@ def main():
try:
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
except HTTPError as e:
# the correlation search doesn't exist
query_dict = {}
else:
query_dict = splunk_request.get_by_path(
- "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches"
+ "servicesNS/nobody/SplunkEnterpriseSecuritySuite/saved/searches",
)
module.exit_json(changed=False, splunk_correlation_search_info=query_dict)
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py
index ac834d1b9..bcecf9926 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_correlation_searches.py
@@ -6,6 +6,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -29,12 +30,12 @@ options:
description:
- Name of correlation search
type: str
- required: True
+ required: true
disabled:
description:
- Disable correlation search
type: bool
- default: False
+ default: false
description:
description:
- Description of the correlation search, this will populate the description field for the web console
@@ -192,7 +193,7 @@ options:
description:
- To suppress alerts from this correlation search or not
type: bool
- default: False
+ default: false
running_config:
description:
- The module, by default, will connect to the remote device and retrieve the current
@@ -319,7 +320,7 @@ EXAMPLES = """
throttle_window_duration: 5s
throttle_fields_to_group_by:
- test_field1
- suppress_alerts: False
+ suppress_alerts: false
search: >
'| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
@@ -426,7 +427,7 @@ EXAMPLES = """
throttle_fields_to_group_by:
- test_field1
- test_field2
- suppress_alerts: True
+ suppress_alerts: true
search: >
'| tstats summariesonly=true values(\"Authentication.tag\") as \"tag\",dc(\"Authentication.user\") as \"user_count\",dc(\"Authent'
'ication.dest\") as \"dest_count\",count from datamodel=\"Authentication\".\"Authentication\" where nodename=\"Authentication.Fai'
@@ -606,7 +607,6 @@ EXAMPLES = """
# "ui_dispatch_context": "SplunkEnterpriseSecuritySuite"
# },
# ],
-
"""
RETURN = """
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py
index 080d23d3b..b0108d74a 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_monitor.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -25,12 +26,12 @@ options:
name:
description:
- The file or directory path to monitor on the system.
- required: True
+ required: true
type: str
state:
description:
- Add or remove a data source.
- required: True
+ required: true
choices:
- "present"
- "absent"
@@ -38,41 +39,41 @@ options:
blacklist:
description:
- Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.
- required: False
+ required: false
type: str
check_index:
description:
- - If set to C(True), the index value is checked to ensure that it is the name of a valid index.
- required: False
+ - If set to C(true), the index value is checked to ensure that it is the name of a valid index.
+ required: false
type: bool
- default: False
+ default: false
check_path:
description:
- - If set to C(True), the name value is checked to ensure that it exists.
- required: False
+ - If set to C(true), the name value is checked to ensure that it exists.
+ required: false
type: bool
crc_salt:
description:
- A string that modifies the file tracking identity for files in this input.
The magic value <SOURCE> invokes special behavior (see admin documentation).
- required: False
+ required: false
type: str
disabled:
description:
- Indicates if input monitoring is disabled.
- required: False
- default: False
+ required: false
+ default: false
type: bool
followTail:
description:
- - If set to C(True), files that are seen for the first time is read from the end.
- required: False
+ - If set to C(true), files that are seen for the first time are read from the end.
+ required: false
type: bool
- default: False
+ default: false
host:
description:
- The value to populate in the host field for events from this data input.
- required: False
+ required: false
type: str
host_regex:
description:
@@ -80,40 +81,40 @@ options:
matches this regular expression, the captured value is used to populate
the host field for events from this data input. The regular expression
must have one capture group.
- required: False
+ required: false
type: str
host_segment:
description:
- Use the specified slash-separated segment of the filepath as the host field value.
- required: False
+ required: false
type: int
ignore_older_than:
description:
- Specify a time value. If the modification time of a file being monitored
falls outside of this rolling time window, the file is no longer being monitored.
- required: False
+ required: false
type: str
index:
description:
- Which index events from this input should be stored in. Defaults to default.
- required: False
+ required: false
type: str
recursive:
description:
- - Setting this to False prevents monitoring of any subdirectories encountered within this data input.
- required: False
+ - Setting this to false prevents monitoring of any subdirectories encountered within this data input.
+ required: false
type: bool
- default: False
+ default: false
rename_source:
description:
- The value to populate in the source field for events from this data input.
The same source should not be used for multiple data inputs.
- required: False
+ required: false
type: str
sourcetype:
description:
- The value to populate in the sourcetype field for incoming events.
- required: False
+ required: false
type: str
time_before_close:
description:
@@ -121,12 +122,12 @@ options:
file is kept open for a minimum of the number of seconds specified in
this value. After this period has elapsed, the file is checked again for
more data.
- required: False
+ required: false
type: int
whitelist:
description:
- Specify a regular expression for a file path. Only file paths that match this regular expression are indexed.
- required: False
+ required: false
type: str
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
@@ -136,22 +137,18 @@ EXAMPLES = """
splunk.es.data_input_monitor:
name: "/var/log/example.log"
state: "present"
- recursive: True
+ recursive: true
"""
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
+from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import quote_plus
-from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
- utils,
-)
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import utils
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(
name=dict(required=True, type="str"),
state=dict(choices=["present", "absent"], required=True),
@@ -197,8 +194,8 @@ def main():
query_dict = splunk_request.get_by_path(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
query_dict = utils.remove_empties(query_dict)
@@ -207,13 +204,15 @@ def main():
needs_change = False
for arg in request_data:
if arg in query_dict["entry"][0]["content"]:
- if to_text(
- query_dict["entry"][0]["content"][arg]
- ) != to_text(request_data[arg]):
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_data[arg],
+ ):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -224,11 +223,13 @@ def main():
if needs_change:
splunk_data = splunk_request.create_update(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
module.exit_json(
- changed=True, msg="{0} updated.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} updated.",
+ splunk_data=splunk_data,
)
else:
# Create it
@@ -238,16 +239,14 @@ def main():
"servicesNS/nobody/search/data/inputs/monitor",
data=_data,
)
- module.exit_json(
- changed=True, msg="{0} created.", splunk_data=splunk_data
- )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
if module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
"servicesNS/nobody/search/data/inputs/monitor/{0}".format(
- quote_plus(module.params["name"])
- )
+ quote_plus(module.params["name"]),
+ ),
)
module.exit_json(
changed=True,
@@ -255,9 +254,7 @@ def main():
splunk_data=splunk_data,
)
- module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
- )
+ module.exit_json(changed=False, msg="Nothing to do.", splunk_data=query_dict)
if __name__ == "__main__":
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py
index 5771eb9cc..14905563a 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_input_network.py
@@ -8,6 +8,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -25,7 +26,7 @@ options:
protocol:
description:
- Choose between tcp and udp
- required: True
+ required: true
choices:
- 'tcp'
- 'udp'
@@ -37,7 +38,7 @@ options:
- C(dns) sets the host to the reverse DNS entry for the IP address of the remote server sending data.
- C(none) leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
default: "ip"
- required: False
+ required: false
type: str
choices:
- "ip"
@@ -51,7 +52,7 @@ options:
- "absent"
- "enabled"
- "disable"
- required: False
+ required: false
default: "present"
type: str
datatype:
@@ -62,12 +63,12 @@ options:
- "cooked"
- "raw"
default: "raw"
- required: False
+ required: false
type: str
host:
description:
- Host from which the indexer gets data.
- required: False
+ required: false
type: str
index:
description:
@@ -76,7 +77,7 @@ options:
name:
description:
- The input port which receives raw data.
- required: True
+ required: true
type: str
queue:
description:
@@ -89,7 +90,7 @@ options:
- "parsingQueue"
- "indexQueue"
type: str
- required: False
+ required: false
default: "parsingQueue"
rawTcpDoneTimeout:
description:
@@ -98,16 +99,16 @@ options:
number of seconds, it adds a Done-key. This implies the last event is completely received.
default: 10
type: int
- required: False
+ required: false
restrictToHost:
description:
- Allows for restricting this input to only accept data from the host specified here.
- required: False
+ required: false
type: str
ssl:
description:
- Enable or disable ssl for the data stream
- required: False
+ required: false
type: bool
source:
description:
@@ -126,7 +127,7 @@ options:
description:
- Set the source type for events from this input.
- '"sourcetype=" is automatically prepended to <string>.'
- - Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).
+ - Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).
type: str
author: Ansible Security Automation Team (@maxamillion) <https://github.com/ansible-security>
"""
@@ -140,16 +141,14 @@ EXAMPLES = """
"""
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
+from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import quote_plus
-from ansible_collections.splunk.es.plugins.module_utils.splunk import (
- SplunkRequest,
-)
+from ansible_collections.splunk.es.plugins.module_utils.splunk import SplunkRequest
-def main():
+def main():
argspec = dict(
state=dict(
required=False,
@@ -178,9 +177,7 @@ def main():
ssl=dict(required=False, type="bool", default=None),
source=dict(required=False, type="str", default=None),
sourcetype=dict(required=False, type="str", default=None),
- datatype=dict(
- required=False, choices=["cooked", "raw"], default="raw"
- ),
+ datatype=dict(required=False, choices=["cooked", "raw"], default="raw"),
)
module = AnsibleModule(argument_spec=argspec, supports_check_mode=True)
@@ -198,7 +195,7 @@ def main():
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
quote_plus(module.params["name"]),
- )
+ ),
)
if module.params["state"] in ["present", "enabled", "disabled"]:
@@ -211,13 +208,15 @@ def main():
needs_change = False
for arg in request_data:
if arg in query_dict["entry"][0]["content"]:
- if to_text(
- query_dict["entry"][0]["content"][arg]
- ) != to_text(request_data[arg]):
+ if to_text(query_dict["entry"][0]["content"][arg]) != to_text(
+ request_data[arg],
+ ):
needs_change = True
if not needs_change:
module.exit_json(
- changed=False, msg="Nothing to do.", splunk_data=query_dict
+ changed=False,
+ msg="Nothing to do.",
+ splunk_data=query_dict,
)
if module.check_mode and needs_change:
module.exit_json(
@@ -236,11 +235,15 @@ def main():
)
if module.params["state"] in ["present", "enabled"]:
module.exit_json(
- changed=True, msg="{0} updated.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} updated.",
+ splunk_data=splunk_data,
)
else:
module.exit_json(
- changed=True, msg="{0} disabled.", splunk_data=splunk_data
+ changed=True,
+ msg="{0} disabled.",
+ splunk_data=splunk_data,
)
else:
# Create it
@@ -251,9 +254,7 @@ def main():
),
data=_data,
)
- module.exit_json(
- changed=True, msg="{0} created.", splunk_data=splunk_data
- )
+ module.exit_json(changed=True, msg="{0} created.", splunk_data=splunk_data)
elif module.params["state"] == "absent":
if query_dict:
splunk_data = splunk_request.delete_by_path(
@@ -261,7 +262,7 @@ def main():
quote_plus(module.params["protocol"]),
quote_plus(module.params["datatype"]),
quote_plus(module.params["name"]),
- )
+ ),
)
module.exit_json(
changed=True,
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py
index 0f4922f77..1f664afb2 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_monitor.py
@@ -6,6 +6,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -26,7 +27,7 @@ options:
name:
description:
- The file or directory path to monitor on the system.
- required: True
+ required: true
type: str
blacklist:
description:
@@ -34,13 +35,13 @@ options:
type: str
check_index:
description:
- - If set to C(True), the index value is checked to ensure that it is the name of a valid index.
+ - If set to C(true), the index value is checked to ensure that it is the name of a valid index.
- This parameter is not returned back by Splunk while obtaining object information.
It is therefore left out while performing idempotency checks
type: bool
check_path:
description:
- - If set to C(True), the name value is checked to ensure that it exists.
+ - If set to C(true), the name value is checked to ensure that it exists.
- This parameter is not returned back by Splunk while obtaining object information.
It is therefore left out while performing idempotency checks
type: bool
@@ -53,10 +54,10 @@ options:
description:
- Indicates if input monitoring is disabled.
type: bool
- default: False
+ default: false
follow_tail:
description:
- - If set to C(True), files that are seen for the first time is read from the end.
+ - If set to C(true), files that are seen for the first time are read from the end.
type: bool
host:
description:
@@ -179,8 +180,8 @@ EXAMPLES = """
config:
- name: "/var/log"
blacklist: "//var/log/[a-z]/gm"
- check_index: True
- check_path: True
+ check_index: true
+ check_path: true
crc_salt: <SOURCE>
rename_source: "test"
whitelist: "//var/log/[0-9]/gm"
@@ -283,7 +284,6 @@ EXAMPLES = """
# "name": "/var/log"
# }
# ],
-
"""
RETURN = """
diff --git a/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py
index 688e806f1..cf259c2d6 100644
--- a/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py
+++ b/ansible_collections/splunk/es/plugins/modules/splunk_data_inputs_network.py
@@ -6,6 +6,7 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -25,12 +26,12 @@ options:
name:
description:
- The input port which receives raw data.
- required: True
+ required: true
type: str
protocol:
description:
- Choose whether to manage TCP or UDP inputs
- required: True
+ required: true
choices:
- 'tcp'
- 'udp'
@@ -58,7 +59,7 @@ options:
- "raw"
- "splunktcptoken"
- "ssl"
- required: False
+ required: false
type: str
disabled:
description:
@@ -124,7 +125,7 @@ options:
description:
- Set the source type for events from this input.
- '"sourcetype=" is automatically prepended to <string>.'
- - Defaults to audittrail (if signedaudit=True) or fschange (if signedaudit=False).
+ - Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).
type: str
token:
description:
@@ -358,7 +359,7 @@ EXAMPLES = """
datatype: raw
name: 8100
connection_host: ip
- disabled: True
+ disabled: true
raw_tcp_done_timeout: 9
restrict_to_host: default
queue: parsingQueue
@@ -409,7 +410,7 @@ EXAMPLES = """
datatype: cooked
name: 8101
connection_host: ip
- disabled: False
+ disabled: false
restrict_to_host: default
state: merged
@@ -460,7 +461,7 @@ EXAMPLES = """
# ],
# "before": [],
-- name: To add the Splunk SSL
+- name: To add the Splunk SSL
splunk.es.splunk_data_inputs_network:
config:
- protocol: tcp
@@ -531,7 +532,7 @@ EXAMPLES = """
datatype: raw
name: 8100
connection_host: ip
- disabled: True
+ disabled: true
host: "$decideOnStartup"
index: default
queue: parsingQueue
@@ -575,7 +576,6 @@ EXAMPLES = """
# "sourcetype": "test_source_type"
# }
# ],
-
"""
RETURN = """