summaryrefslogtreecommitdiffstats
path: root/ansible_collections/dellemc/openmanage/plugins/module_utils
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-28 16:03:42 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-28 16:03:42 +0000
commit66cec45960ce1d9c794e9399de15c138acb18aed (patch)
tree59cd19d69e9d56b7989b080da7c20ef1a3fe2a5a /ansible_collections/dellemc/openmanage/plugins/module_utils
parentInitial commit. (diff)
downloadansible-upstream.tar.xz
ansible-upstream.zip
Adding upstream version 7.3.0+dfsg.upstream/7.3.0+dfsgupstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'ansible_collections/dellemc/openmanage/plugins/module_utils')
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/module_utils/__init__.py0
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/module_utils/dellemc_idrac.py104
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/module_utils/idrac_redfish.py377
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/module_utils/ome.py399
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/module_utils/redfish.py219
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/module_utils/utils.py350
6 files changed, 1449 insertions, 0 deletions
diff --git a/ansible_collections/dellemc/openmanage/plugins/module_utils/__init__.py b/ansible_collections/dellemc/openmanage/plugins/module_utils/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/plugins/module_utils/__init__.py
diff --git a/ansible_collections/dellemc/openmanage/plugins/module_utils/dellemc_idrac.py b/ansible_collections/dellemc/openmanage/plugins/module_utils/dellemc_idrac.py
new file mode 100644
index 00000000..fee5339c
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/plugins/module_utils/dellemc_idrac.py
@@ -0,0 +1,104 @@
+# -*- coding: utf-8 -*-
+
+# Dell EMC OpenManage Ansible Modules
+# Version 5.0.1
+# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import os
+try:
+ from omsdk.sdkinfra import sdkinfra
+ from omsdk.sdkcreds import UserCredentials
+ from omsdk.sdkfile import FileOnShare, file_share_manager
+ from omsdk.sdkprotopref import ProtoPreference, ProtocolEnum
+ from omsdk.http.sdkwsmanbase import WsManOptions
+ HAS_OMSDK = True
+except ImportError:
+ HAS_OMSDK = False
+
+
# Common Ansible argument spec for iDRAC (WS-Man/OMSDK) authentication options;
# merged into each module's own argument_spec.
idrac_auth_params = {
    "idrac_ip": {"required": True, "type": 'str'},
    "idrac_user": {"required": True, "type": 'str'},
    "idrac_password": {"required": True, "type": 'str', "aliases": ['idrac_pwd'], "no_log": True},
    "idrac_port": {"required": False, "default": 443, "type": 'int'},
    "validate_certs": {"type": "bool", "default": True},
    "ca_path": {"type": "path"},
    "timeout": {"type": "int", "default": 30},
}
+
+
class iDRACConnection:
    """Context manager that opens a WS-Man connection to an iDRAC via OMSDK."""

    def __init__(self, module_params):
        """Validate credentials and prepare the OMSDK protocol options.

        :param module_params: Ansible module parameters (idrac_ip, idrac_user,
            idrac_password, idrac_port, validate_certs, ca_path, timeout).
        :raises ImportError: when the omsdk library is not installed.
        :raises ValueError: when ip, user or password is empty.
        :raises RuntimeError: when the OMSDK infrastructure cannot be created.
        """
        if not HAS_OMSDK:
            raise ImportError("Dell EMC OMSDK library is required for this module")
        self.idrac_ip = module_params['idrac_ip']
        self.idrac_user = module_params['idrac_user']
        self.idrac_pwd = module_params['idrac_password']
        self.idrac_port = module_params['idrac_port']
        if not all((self.idrac_ip, self.idrac_user, self.idrac_pwd)):
            raise ValueError("hostname, username and password required")
        self.handle = None
        self.creds = UserCredentials(self.idrac_user, self.idrac_pwd)
        self.validate_certs = module_params.get("validate_certs", False)
        self.ca_path = module_params.get("ca_path")
        verify_ssl = False
        if self.validate_certs is True:
            if self.ca_path is None:
                self.ca_path = self._get_omam_ca_env()
            # omsdk accepts either a CA bundle path or True here.
            verify_ssl = self.ca_path
        timeout = module_params.get("timeout", 30)
        # Fall back to the default for missing or non-integer values
        # (isinstance instead of the original type() comparison).
        if not timeout or not isinstance(timeout, int):
            timeout = 30
        self.pOp = WsManOptions(port=self.idrac_port, read_timeout=timeout, verify_ssl=verify_ssl)
        self.sdk = sdkinfra()
        if self.sdk is None:
            raise RuntimeError("Could not initialize iDRAC drivers.")

    def __enter__(self):
        """Connect over WS-Man and return the iDRAC driver handle."""
        self.sdk.importPath()
        protopref = ProtoPreference(ProtocolEnum.WSMAN)
        protopref.include_only(ProtocolEnum.WSMAN)
        self.handle = self.sdk.get_driver(self.sdk.driver_enum.iDRAC, self.idrac_ip, self.creds,
                                          protopref=protopref, pOptions=self.pOp)
        if self.handle is None:
            msg = "Unable to communicate with iDRAC {0}. This may be due to one of the following: " \
                  "Incorrect username or password, unreachable iDRAC IP or " \
                  "a failure in TLS/SSL handshake.".format(self.idrac_ip)
            raise RuntimeError(msg)
        return self.handle

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Disconnect; guarded so a never-connected handle does not raise."""
        if self.handle:
            self.handle.disconnect()
        return False

    def _get_omam_ca_env(self):
        """Check if the value is set in REQUESTS_CA_BUNDLE or CURL_CA_BUNDLE or OMAM_CA_BUNDLE or True as ssl has to
        be validated from omsdk with single param and is default to false in omsdk"""
        return (os.environ.get("REQUESTS_CA_BUNDLE") or os.environ.get("CURL_CA_BUNDLE")
                or os.environ.get("OMAM_CA_BUNDLE") or True)
diff --git a/ansible_collections/dellemc/openmanage/plugins/module_utils/idrac_redfish.py b/ansible_collections/dellemc/openmanage/plugins/module_utils/idrac_redfish.py
new file mode 100644
index 00000000..168c8277
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/plugins/module_utils/idrac_redfish.py
@@ -0,0 +1,377 @@
+# -*- coding: utf-8 -*-
+
+# Dell EMC OpenManage Ansible Modules
+# Version 5.5.0
+# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import json
+import re
+import time
+import os
+from ansible.module_utils.urls import open_url, ConnectionError, SSLValidationError
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.six.moves.urllib.parse import urlencode
+
# Common Ansible argument spec for iDRAC Redfish authentication options;
# merged into each module's own argument_spec.
idrac_auth_params = {
    "idrac_ip": {"required": True, "type": 'str'},
    "idrac_user": {"required": True, "type": 'str'},
    "idrac_password": {"required": True, "type": 'str', "aliases": ['idrac_pwd'], "no_log": True},
    "idrac_port": {"required": False, "default": 443, "type": 'int'},
    "validate_certs": {"type": "bool", "default": True},
    "ca_path": {"type": "path"},
    "timeout": {"type": "int", "default": 30},

}

# Redfish session endpoints used by iDRACRedfishAPI.__enter__/__exit__.
SESSION_RESOURCE_COLLECTION = {
    "SESSION": "/redfish/v1/Sessions",
    "SESSION_ID": "/redfish/v1/Sessions/{Id}",
}
# Manager and Server Configuration Profile (SCP) OEM action endpoints.
MANAGER_URI = "/redfish/v1/Managers/iDRAC.Embedded.1"
EXPORT_URI = "/redfish/v1/Managers/iDRAC.Embedded.1/Actions/Oem/EID_674_Manager.ExportSystemConfiguration"
IMPORT_URI = "/redfish/v1/Managers/iDRAC.Embedded.1/Actions/Oem/EID_674_Manager.ImportSystemConfiguration"
IMPORT_PREVIEW = "/redfish/v1/Managers/iDRAC.Embedded.1/Actions/Oem/EID_674_Manager.ImportSystemConfigurationPreview"
+
+
class OpenURLResponse(object):
    """Handles an HTTPResponse returned by open_url."""

    def __init__(self, resp):
        """Read and cache the response body (*resp* may be None)."""
        self.body = None
        self.resp = resp
        if self.resp:
            self.body = self.resp.read()

    @property
    def json_data(self):
        """Body decoded as JSON.

        :raises ValueError: when the body is not valid JSON.
        """
        try:
            return json.loads(self.body)
        except ValueError:
            raise ValueError("Unable to parse json")

    @property
    def status_code(self):
        """HTTP status code of the response."""
        return self.resp.getcode()

    @property
    def success(self):
        """True when the status code is in the 2xx range.

        Fixed operator precedence: the original ``status >= 200 & status <= 299``
        applied bitwise AND before the comparisons, so non-2xx codes such as 400
        could report success.
        """
        return 200 <= self.status_code <= 299

    @property
    def headers(self):
        """Raw response headers."""
        return self.resp.headers

    @property
    def reason(self):
        """HTTP reason phrase."""
        return self.resp.reason
+
+
class iDRACRedfishAPI(object):
    """REST API client for iDRAC Redfish modules."""

    def __init__(self, module_params, req_session=False):
        """Initialize connection settings from the module parameters.

        :param module_params: Ansible module parameters (idrac_ip, idrac_user, ...).
        :param req_session: when True, __enter__ creates an X-Auth-Token session.
        """
        self.ipaddress = module_params['idrac_ip']
        self.username = module_params['idrac_user']
        self.password = module_params['idrac_password']
        self.port = module_params['idrac_port']
        self.validate_certs = module_params.get("validate_certs", False)
        self.ca_path = module_params.get("ca_path")
        self.timeout = module_params.get("timeout", 30)
        self.use_proxy = module_params.get("use_proxy", True)
        self.req_session = req_session
        self.session_id = None
        self.protocol = 'https'
        self._headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}

    def _get_url(self, uri):
        """Prefix *uri* with the https://host:port origin."""
        return "{0}://{1}:{2}{3}".format(self.protocol, self.ipaddress, self.port, uri)

    def _build_url(self, path, query_param=None):
        """Build the complete request URL, appending encoded query parameters."""
        url = path
        if path:
            url = self._get_url(path)
        if query_param:
            url += "?{0}".format(urlencode(query_param))
        return url

    def _url_common_args_spec(self, method, api_timeout, headers=None):
        """Create the open_url keyword arguments common to all requests."""
        req_header = self._headers
        if headers:
            req_header.update(headers)
        if api_timeout is None:
            api_timeout = self.timeout
        if self.ca_path is None:
            # Fall back to CA bundle environment variables when no ca_path given.
            self.ca_path = self._get_omam_ca_env()
        url_kwargs = {
            "method": method,
            "validate_certs": self.validate_certs,
            "ca_path": self.ca_path,
            "use_proxy": self.use_proxy,
            "headers": req_header,
            "timeout": api_timeout,
            "follow_redirects": 'all',
        }
        return url_kwargs

    def _args_without_session(self, path, method, api_timeout, headers=None):
        """Argument spec for basic authentication.

        Credentials are omitted for the session-creation POST itself.
        """
        url_kwargs = self._url_common_args_spec(method, api_timeout, headers=headers)
        if not (path == SESSION_RESOURCE_COLLECTION["SESSION"] and method == 'POST'):
            url_kwargs["url_username"] = self.username
            url_kwargs["url_password"] = self.password
            url_kwargs["force_basic_auth"] = True
        return url_kwargs

    def _args_with_session(self, method, api_timeout, headers=None):
        """Argument spec when an X-Auth-Token session is in use."""
        url_kwargs = self._url_common_args_spec(method, api_timeout, headers=headers)
        url_kwargs["force_basic_auth"] = False
        return url_kwargs

    def invoke_request(self, uri, method, data=None, query_param=None, headers=None, api_timeout=None, dump=True):
        """Send one HTTP request and wrap the response.

        :param uri: request path (no query string).
        :param method: HTTP verb.
        :param data: optional payload; JSON-encoded when *dump* is True.
        :param query_param: optional dict of query parameters.
        :param headers: optional extra HTTP headers.
        :param api_timeout: per-request timeout override.
        :return: OpenURLResponse
        """
        # Session-token auth once a session exists, otherwise basic auth.
        if 'X-Auth-Token' in self._headers:
            url_kwargs = self._args_with_session(method, api_timeout, headers=headers)
        else:
            url_kwargs = self._args_without_session(uri, method, api_timeout, headers=headers)
        if data and dump:
            data = json.dumps(data)
        url = self._build_url(uri, query_param=query_param)
        # The original wrapped this in a no-op ``except ... as err: raise err``;
        # errors now propagate unchanged to the caller.
        resp = open_url(url, data=data, **url_kwargs)
        return OpenURLResponse(resp)

    def __enter__(self):
        """Create a Redfish session (when req_session) and cache its token."""
        if self.req_session:
            payload = {'UserName': self.username,
                       'Password': self.password}
            path = SESSION_RESOURCE_COLLECTION["SESSION"]
            resp = self.invoke_request(path, 'POST', data=payload)
            if resp and resp.success:
                self.session_id = resp.json_data.get("Id")
                self._headers["X-Auth-Token"] = resp.headers.get('X-Auth-Token')
            else:
                msg = "Could not create the session"
                raise ConnectionError(msg)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        """Delete the session created in __enter__, if any."""
        if self.session_id:
            path = SESSION_RESOURCE_COLLECTION["SESSION_ID"].format(Id=self.session_id)
            self.invoke_request(path, 'DELETE')
        return False

    @property
    def get_server_generation(self):
        """
        Fetch the connected server generation and firmware version.
        :return: tuple such as (14, "4.11.11.11"); (None, None) when unavailable.
        """
        # The original returned an unbound ``generation`` (NameError) on a
        # non-200 response and crashed when "Model" had no "<n>G" marker.
        generation, firmware_version = None, None
        response = self.invoke_request(MANAGER_URI, 'GET')
        if response.status_code == 200:
            match = re.search(r"\d+(?=G)", response.json_data["Model"])
            if match:
                generation = int(match.group())
            firmware_version = response.json_data["FirmwareVersion"]
        return generation, firmware_version

    def wait_for_job_complete(self, task_uri, job_wait=False):
        """
        This function wait till the job completion.
        :param task_uri: uri to track job.
        :param job_wait: True or False decide whether to wait till the job completion.
        :return: object
        """
        response = None
        while job_wait:
            try:
                response = self.invoke_request(task_uri, "GET")
                if response.json_data.get("TaskState") == "Running":
                    time.sleep(10)
                else:
                    break
            except ValueError:
                # Non-JSON payload marks the end of task tracking.
                response = response.body
                break
        return response

    def wait_for_job_completion(self, job_uri, job_wait=False, reboot=False, apply_update=False):
        """
        This function wait till the job completion.
        :param job_uri: uri to track job.
        :param job_wait: True or False decide whether to wait till the job completion.
        :return: object
        """
        time.sleep(5)
        response = self.invoke_request(job_uri, "GET")
        while job_wait:
            response = self.invoke_request(job_uri, "GET")
            if response.json_data.get("PercentComplete") == 100 and \
                    response.json_data.get("JobState") == "Completed":
                break
            if response.json_data.get("JobState") == "Starting" and not reboot and apply_update:
                break
            time.sleep(30)
        return response

    def _apply_share_parameters(self, payload, share):
        """Merge the optional *share* settings into payload["ShareParameters"].

        ShareName is only applied when non-empty; every other value only needs
        to be non-None (matching the original per-key checks).
        """
        if share is None:
            share = {}
        key_map = (("share_ip", "IPAddress"), ("share_name", "ShareName"),
                   ("share_type", "ShareType"), ("file_name", "FileName"),
                   ("username", "Username"), ("password", "Password"))
        for key, param in key_map:
            value = share.get(key)
            if value is None:
                continue
            if key == "share_name" and not value:
                continue
            payload["ShareParameters"][param] = value
        return payload

    def _track_if_accepted(self, response, job_wait):
        """Follow the Location header of an accepted (202) request when waiting."""
        if response.status_code == 202 and job_wait:
            task_uri = response.headers["Location"]
            response = self.wait_for_job_complete(task_uri, job_wait=job_wait)
        return response

    def export_scp(self, export_format=None, export_use=None, target=None,
                   job_wait=False, share=None):
        """
        This method exports system configuration details from the system.
        :param export_format: XML or JSON.
        :param export_use: Default or Clone or Replace.
        :param target: IDRAC or NIC or ALL or BIOS or RAID.
        :param job_wait: True or False decide whether to wait till the job completion.
        :param share: dictionary which has all the share details.
        :return: exported data in requested format.
        """
        payload = {"ExportFormat": export_format, "ExportUse": export_use,
                   "ShareParameters": {"Target": target}}
        self._apply_share_parameters(payload, share)
        response = self.invoke_request(EXPORT_URI, "POST", data=payload)
        return self._track_if_accepted(response, job_wait)

    def import_scp_share(self, shutdown_type=None, host_powerstate=None, job_wait=True,
                         target=None, import_buffer=None, share=None):
        """
        This method imports system configuration using share.
        :param shutdown_type: graceful
        :param host_powerstate: on
        :param job_wait: True
        :param target: iDRAC
        :param import_buffer: inline SCP content (optional).
        :param share: dictionary which has all the share details.
        :return: json response
        """
        payload = {"ShutdownType": shutdown_type, "EndHostPowerState": host_powerstate,
                   "ShareParameters": {"Target": target}}
        if import_buffer is not None:
            payload["ImportBuffer"] = import_buffer
        self._apply_share_parameters(payload, share)
        response = self.invoke_request(IMPORT_URI, "POST", data=payload)
        return self._track_if_accepted(response, job_wait)

    def import_preview(self, import_buffer=None, target=None, share=None, job_wait=False):
        """Preview an SCP import without applying it; same payload rules as import."""
        payload = {"ShareParameters": {"Target": target}}
        if import_buffer is not None:
            payload["ImportBuffer"] = import_buffer
        self._apply_share_parameters(payload, share)
        response = self.invoke_request(IMPORT_PREVIEW, "POST", data=payload)
        return self._track_if_accepted(response, job_wait)

    def import_scp(self, import_buffer=None, target=None, job_wait=False):
        """
        This method imports system configuration details to the system.
        :param import_buffer: import buffer payload content xml or json format
        :param target: IDRAC or NIC or ALL or BIOS or RAID.
        :param job_wait: True or False decide whether to wait till the job completion.
        :return: json response
        """
        payload = {"ImportBuffer": import_buffer, "ShareParameters": {"Target": target}}
        response = self.invoke_request(IMPORT_URI, "POST", data=payload)
        return self._track_if_accepted(response, job_wait)

    def get_idrac_local_account_attr(self, idrac_attribues, fqdd=None):
        """
        This method filters the user attributes from the given idrac attributes.
        :param idrac_attribues: all the idrac attribues in json data format.
        :param fqdd: component FQDD to select, e.g. "iDRAC.Embedded.1".
        :return: user attributes in dictionary format (None when the payload has
            no SystemConfiguration; {} when no component matches *fqdd*).
        """
        user_attr = None
        if "SystemConfiguration" in idrac_attribues:
            sys_config = idrac_attribues.get("SystemConfiguration")
            # Initialized so a missing FQDD no longer raises NameError.
            attributes = []
            for comp in sys_config.get("Components"):
                if comp.get("FQDD") == fqdd:
                    attributes = comp.get("Attributes")
                    break
            user_attr = dict((attr["Name"], attr["Value"]) for attr in attributes
                             if attr["Name"].startswith("Users."))
        return user_attr

    def _get_omam_ca_env(self):
        """Check if the value is set in REQUESTS_CA_BUNDLE or CURL_CA_BUNDLE or OMAM_CA_BUNDLE or returns None"""
        return os.environ.get("REQUESTS_CA_BUNDLE") or os.environ.get("CURL_CA_BUNDLE") or os.environ.get("OMAM_CA_BUNDLE")
diff --git a/ansible_collections/dellemc/openmanage/plugins/module_utils/ome.py b/ansible_collections/dellemc/openmanage/plugins/module_utils/ome.py
new file mode 100644
index 00000000..cdb5ddf2
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/plugins/module_utils/ome.py
@@ -0,0 +1,399 @@
+# -*- coding: utf-8 -*-
+
+# Dell EMC OpenManage Ansible Modules
+# Version 5.0.1
+# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import os
+import time
+from ansible.module_utils.urls import open_url, ConnectionError, SSLValidationError
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.six.moves.urllib.parse import urlencode
+
# Common Ansible argument spec for OpenManage Enterprise authentication options.
ome_auth_params = {
    "hostname": {"required": True, "type": "str"},
    "username": {"required": True, "type": "str"},
    "password": {"required": True, "type": "str", "no_log": True},
    "port": {"type": "int", "default": 443},
    "validate_certs": {"type": "bool", "default": True},
    "ca_path": {"type": "path"},
    "timeout": {"type": "int", "default": 30},
}

# SessionService endpoints used by RestOME.__enter__/__exit__ (relative to /api).
SESSION_RESOURCE_COLLECTION = {
    "SESSION": "SessionService/Sessions",
    "SESSION_ID": "SessionService/Sessions('{Id}')",
}

# JobService endpoints used for job polling.
JOB_URI = "JobService/Jobs({job_id})"
JOB_SERVICE_URI = "JobService/Jobs"
+
+
class OpenURLResponse(object):
    """Thin wrapper over an HTTPResponse produced by open_url."""

    def __init__(self, resp):
        # resp may be None; the body is read once and cached.
        self.resp = resp
        self.body = resp.read() if resp else None

    @property
    def json_data(self):
        """Body parsed as JSON; raises ValueError on malformed content."""
        try:
            return json.loads(self.body)
        except ValueError:
            raise ValueError("Unable to parse json")

    @property
    def status_code(self):
        """HTTP status code reported by the underlying response."""
        return self.resp.getcode()

    @property
    def success(self):
        """Whether the request succeeded (the 2xx codes OME actually returns)."""
        return self.status_code in (200, 201, 202, 204)

    @property
    def token_header(self):
        """Session token issued by OME, if any."""
        return self.resp.headers.get('X-Auth-Token')
+
+
+class RestOME(object):
+ """Handles OME API requests"""
+
+ def __init__(self, module_params=None, req_session=False):
+ self.module_params = module_params
+ self.hostname = self.module_params["hostname"]
+ self.username = self.module_params["username"]
+ self.password = self.module_params["password"]
+ self.port = self.module_params["port"]
+ self.validate_certs = self.module_params.get("validate_certs", True)
+ self.ca_path = self.module_params.get("ca_path")
+ self.timeout = self.module_params.get("timeout", 30)
+ self.req_session = req_session
+ self.session_id = None
+ self.protocol = 'https'
+ self._headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
+
+ def _get_base_url(self):
+ """builds base url"""
+ return '{0}://{1}:{2}/api'.format(self.protocol, self.hostname, self.port)
+
+ def _build_url(self, path, query_param=None):
+ """builds complete url"""
+ url = path
+ base_uri = self._get_base_url()
+ if path:
+ url = '{0}/{1}'.format(base_uri, path)
+ if query_param:
+ """Ome filtering does not work as expected when '+' is passed,
+ urlencode will encode spaces as '+' so replace it to '%20'"""
+ url += "?{0}".format(urlencode(query_param).replace('+', '%20'))
+ return url
+
+ def _url_common_args_spec(self, method, api_timeout, headers=None):
+ """Creates an argument common spec"""
+ req_header = self._headers
+ if headers:
+ req_header.update(headers)
+ if api_timeout is None:
+ api_timeout = self.timeout
+ if self.ca_path is None:
+ self.ca_path = self._get_omam_ca_env()
+ url_kwargs = {
+ "method": method,
+ "validate_certs": self.validate_certs,
+ "ca_path": self.ca_path,
+ "use_proxy": True,
+ "headers": req_header,
+ "timeout": api_timeout,
+ "follow_redirects": 'all',
+ }
+ return url_kwargs
+
+ def _args_without_session(self, method, api_timeout, headers=None):
+ """Creates an argument spec in case of basic authentication"""
+ req_header = self._headers
+ if headers:
+ req_header.update(headers)
+ url_kwargs = self._url_common_args_spec(method, api_timeout, headers=headers)
+ url_kwargs["url_username"] = self.username
+ url_kwargs["url_password"] = self.password
+ url_kwargs["force_basic_auth"] = True
+ return url_kwargs
+
+ def _args_with_session(self, method, api_timeout, headers=None):
+ """Creates an argument spec, in case of authentication with session"""
+ url_kwargs = self._url_common_args_spec(method, api_timeout, headers=headers)
+ url_kwargs["force_basic_auth"] = False
+ return url_kwargs
+
+ def invoke_request(self, method, path, data=None, query_param=None, headers=None,
+ api_timeout=None, dump=True):
+ """
+ Sends a request through open_url
+ Returns :class:`OpenURLResponse` object.
+ :arg method: HTTP verb to use for the request
+ :arg path: path to request without query parameter
+ :arg data: (optional) Payload to send with the request
+ :arg query_param: (optional) Dictionary of query parameter to send with request
+ :arg headers: (optional) Dictionary of HTTP Headers to send with the
+ request
+ :arg api_timeout: (optional) How long to wait for the server to send
+ data before giving up
+ :arg dump: (Optional) boolean value for dumping payload data.
+ :returns: OpenURLResponse
+ """
+ try:
+ if 'X-Auth-Token' in self._headers:
+ url_kwargs = self._args_with_session(method, api_timeout, headers=headers)
+ else:
+ url_kwargs = self._args_without_session(method, api_timeout, headers=headers)
+ if data and dump:
+ data = json.dumps(data)
+ url = self._build_url(path, query_param=query_param)
+ resp = open_url(url, data=data, **url_kwargs)
+ resp_data = OpenURLResponse(resp)
+ except (HTTPError, URLError, SSLValidationError, ConnectionError) as err:
+ raise err
+ return resp_data
+
+ def __enter__(self):
+ """Creates sessions by passing it to header"""
+ if self.req_session:
+ payload = {'UserName': self.username,
+ 'Password': self.password,
+ 'SessionType': 'API', }
+ path = SESSION_RESOURCE_COLLECTION["SESSION"]
+ resp = self.invoke_request('POST', path, data=payload)
+ if resp and resp.success:
+ self.session_id = resp.json_data.get("Id")
+ self._headers["X-Auth-Token"] = resp.token_header
+ else:
+ msg = "Could not create the session"
+ raise ConnectionError(msg)
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ """Deletes a session id, which is in use for request"""
+ if self.session_id:
+ path = SESSION_RESOURCE_COLLECTION["SESSION_ID"].format(Id=self.session_id)
+ self.invoke_request('DELETE', path)
+ return False
+
+ def get_all_report_details(self, uri):
+ """
+ This implementation mainly dependent on '@odata.count' value.
+ Currently first request without query string, always returns total number of available
+ reports in '@odata.count'.
+ """
+ try:
+ resp = self.invoke_request('GET', uri)
+ data = resp.json_data
+ report_list = data["value"]
+ total_count = data['@odata.count']
+ remaining_count = total_count - len(report_list)
+ first_page_count = len(report_list)
+ while remaining_count > 0:
+ resp = self.invoke_request('GET', uri,
+ query_param={"$top": first_page_count, "$skip": len(report_list)})
+ data = resp.json_data
+ value = data["value"]
+ report_list.extend(value)
+ remaining_count = remaining_count - len(value)
+ return {"resp_obj": resp, "report_list": report_list}
+ except (URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError) as err:
+ raise err
+
+ def get_job_type_id(self, jobtype_name):
+ """This provides an ID of the job type."""
+ job_type_id = None
+ resp = self.invoke_request('GET', "JobService/JobTypes")
+ data = resp.json_data["value"]
+ for each in data:
+ if each["Name"] == jobtype_name:
+ job_type_id = each["Id"]
+ break
+ return job_type_id
+
+ def get_device_id_from_service_tag(self, service_tag):
+ """
+ :param service_tag: service tag of the device
+ :return: dict
+ Id: int: device id
+ value: dict: device id details
+ not_found_msg: str: message if service tag not found
+ """
+ device_id = None
+ query = "DeviceServiceTag eq '{0}'".format(service_tag)
+ response = self.invoke_request("GET", "DeviceService/Devices", query_param={"$filter": query})
+ value = response.json_data.get("value", [])
+ device_info = {}
+ if value:
+ device_info = value[0]
+ device_id = device_info["Id"]
+ return {"Id": device_id, "value": device_info}
+
+ def get_all_items_with_pagination(self, uri):
+ """
+ This implementation mainly to get all available items from ome for pagination
+ supported GET uri
+ :param uri: uri which supports pagination
+ :return: dict.
+ """
+ try:
+ resp = self.invoke_request('GET', uri)
+ data = resp.json_data
+ total_items = data.get("value", [])
+ total_count = data.get('@odata.count', 0)
+ next_link = data.get('@odata.nextLink', '')
+ while next_link:
+ resp = self.invoke_request('GET', next_link.split('/api')[-1])
+ data = resp.json_data
+ value = data["value"]
+ next_link = data.get('@odata.nextLink', '')
+ total_items.extend(value)
+ return {"total_count": total_count, "value": total_items}
+ except (URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError) as err:
+ raise err
+
+ def get_device_type(self):
+ """
+ Returns device type map where as key is type and value is type name
+ eg: {1000: "SERVER", 2000: "CHASSIS", 4000: "NETWORK_IOM", "8000": "STORAGE_IOM", 3000: "STORAGE"}
+ :return: dict, first item dict gives device type map
+ """
+ device_map = {}
+ response = self.invoke_request("GET", "DeviceService/DeviceType")
+ if response.json_data.get("value"):
+ device_map = dict([(item["DeviceType"], item["Name"]) for item in response.json_data["value"]])
+ return device_map
+
+ def get_job_info(self, job_id):
+ try:
+ job_status_map = {
+ 2020: "Scheduled", 2030: "Queued", 2040: "Starting", 2050: "Running", 2060: "Completed",
+ 2070: "Failed", 2090: "Warning", 2080: "New", 2100: "Aborted", 2101: "Paused", 2102: "Stopped",
+ 2103: "Canceled"
+ }
+ failed_job_status = [2070, 2090, 2100, 2101, 2102, 2103]
+ job_url = JOB_URI.format(job_id=job_id)
+ job_resp = self.invoke_request('GET', job_url)
+ job_dict = job_resp.json_data
+ job_status = job_dict['LastRunStatus']['Id']
+ if job_status in [2060, 2020]:
+ job_failed = False
+ message = "Job {0} successfully.".format(job_status_map[job_status])
+ exit_poll = True
+ return exit_poll, job_failed, message
+ elif job_status in failed_job_status:
+ exit_poll = True
+ job_failed = True
+ message = "Job is in {0} state, and is not completed.".format(job_status_map[job_status])
+ return exit_poll, job_failed, message
+ return False, False, None
+ except HTTPError:
+ job_failed = True
+ message = "Unable to track the job status of {0}.".format(job_id)
+ exit_poll = True
+ return exit_poll, job_failed, message
+
+ def job_tracking(self, job_id, job_wait_sec=600, sleep_time=60):
+ """
+ job_id: job id
+ job_wait_sec: Maximum time to wait to fetch the final job details in seconds
+ sleep_time: Maximum time to sleep in seconds in each job details fetch
+ """
+ max_sleep_time = job_wait_sec
+ sleep_interval = sleep_time
+ while max_sleep_time:
+ if max_sleep_time > sleep_interval:
+ max_sleep_time = max_sleep_time - sleep_interval
+ else:
+ sleep_interval = max_sleep_time
+ max_sleep_time = 0
+ time.sleep(sleep_interval)
+ exit_poll, job_failed, job_message = self.get_job_info(job_id)
+ if exit_poll is True:
+ return job_failed, job_message
+ return True, "The job is not complete after {0} seconds.".format(job_wait_sec)
+
+ def strip_substr_dict(self, odata_dict, chkstr='@odata.'):
+ cp = odata_dict.copy()
+ klist = cp.keys()
+ for k in klist:
+ if chkstr in str(k).lower():
+ odata_dict.pop(k)
+ return odata_dict
+
+ def job_submission(self, job_name, job_desc, targets, params, job_type,
+ schedule="startnow", state="Enabled"):
+ job_payload = {"JobName": job_name, "JobDescription": job_desc,
+ "Schedule": schedule, "State": state, "Targets": targets,
+ "Params": params, "JobType": job_type}
+ response = self.invoke_request("POST", JOB_SERVICE_URI, data=job_payload)
+ return response
+
+ def test_network_connection(self, share_address, share_path, share_type,
+ share_user=None, share_password=None, share_domain=None):
+ job_type = {"Id": 56, "Name": "ValidateNWFileShare_Task"}
+ params = [
+ {"Key": "checkPathOnly", "Value": "false"},
+ {"Key": "shareType", "Value": share_type},
+ {"Key": "ShareNetworkFilePath", "Value": share_path},
+ {"Key": "shareAddress", "Value": share_address},
+ {"Key": "testShareWriteAccess", "Value": "true"}
+ ]
+ if share_user is not None:
+ params.append({"Key": "UserName", "Value": share_user})
+ if share_password is not None:
+ params.append({"Key": "Password", "Value": share_password})
+ if share_domain is not None:
+ params.append({"Key": "domainName", "Value": share_domain})
+ job_response = self.job_submission("Validate Share", "Validate Share", [], params, job_type)
+ return job_response
+
+ def check_existing_job_state(self, job_type_name):
+ query_param = {"$filter": "LastRunStatus/Id eq 2030 or LastRunStatus/Id eq 2040 or LastRunStatus/Id eq 2050"}
+ job_resp = self.invoke_request("GET", JOB_SERVICE_URI, query_param=query_param)
+ job_lst = job_resp.json_data["value"] if job_resp.json_data.get("value") is not None else []
+ for job in job_lst:
+ if job["JobType"]["Name"] == job_type_name:
+ job_allowed = False
+ available_jobs = job
+ break
+ else:
+ job_allowed = True
+ available_jobs = job_lst
+ return job_allowed, available_jobs
+
+ def _get_omam_ca_env(self):
+ """Check if the value is set in REQUESTS_CA_BUNDLE or CURL_CA_BUNDLE or OMAM_CA_BUNDLE or returns None"""
+ return os.environ.get("REQUESTS_CA_BUNDLE") or os.environ.get("CURL_CA_BUNDLE") or os.environ.get("OMAM_CA_BUNDLE")
diff --git a/ansible_collections/dellemc/openmanage/plugins/module_utils/redfish.py b/ansible_collections/dellemc/openmanage/plugins/module_utils/redfish.py
new file mode 100644
index 00000000..59c46705
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/plugins/module_utils/redfish.py
@@ -0,0 +1,219 @@
+# -*- coding: utf-8 -*-
+
+# Dell EMC OpenManage Ansible Modules
+# Version 5.0.1
+# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import json
+import os
+from ansible.module_utils.urls import open_url, ConnectionError, SSLValidationError
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.six.moves.urllib.parse import urlencode
+
# Common connection/authentication argument spec shared by Redfish-based modules.
redfish_auth_params = {
    "baseuri": {"required": True, "type": "str"},
    "username": {"required": True, "type": "str"},
    "password": {"required": True, "type": "str", "no_log": True},
    "validate_certs": {"type": "bool", "default": True},
    "ca_path": {"type": "path"},
    "timeout": {"type": "int", "default": 30},
}

# Redfish session endpoints used for token-based (X-Auth-Token) authentication.
SESSION_RESOURCE_COLLECTION = {
    "SESSION": "/redfish/v1/Sessions",
    "SESSION_ID": "/redfish/v1/Sessions/{Id}",
}
+
+
class OpenURLResponse(object):
    """Wraps the HTTPResponse returned by open_url with convenience accessors."""

    def __init__(self, resp):
        self.body = None
        self.resp = resp
        # Read the body eagerly; the underlying response may not be re-readable.
        if self.resp:
            self.body = self.resp.read()

    @property
    def json_data(self):
        """Body parsed as JSON; raises ValueError when the body is not JSON."""
        try:
            return json.loads(self.body)
        except ValueError:
            raise ValueError("Unable to parse json")

    @property
    def status_code(self):
        """HTTP status code of the response."""
        return self.resp.getcode()

    @property
    def success(self):
        """True when the status code is in the 2xx range.

        Fixed a precedence bug: `status >= 200 & status <= 299` parsed as
        `status >= (200 & status) <= 299` because `&` binds tighter than the
        comparisons, which misclassified errors such as 404 as success.
        """
        return 200 <= self.status_code <= 299

    @property
    def headers(self):
        """Response headers object."""
        return self.resp.headers

    @property
    def reason(self):
        """Server-supplied reason phrase for the status."""
        return self.resp.reason
+
+
class Redfish(object):
    """Handles iDRAC Redfish API requests over HTTPS.

    Authentication is HTTP basic by default; when the object is used as a
    context manager with req_session=True, a Redfish session is created in
    __enter__ and its X-Auth-Token is used for subsequent requests.
    """

    def __init__(self, module_params=None, req_session=False):
        # module_params must carry at least baseuri/username/password
        # (see redfish_auth_params); optional keys fall back to defaults here.
        self.module_params = module_params
        self.hostname = self.module_params["baseuri"]
        self.username = self.module_params["username"]
        self.password = self.module_params["password"]
        self.validate_certs = self.module_params.get("validate_certs", True)
        self.ca_path = self.module_params.get("ca_path")
        self.timeout = self.module_params.get("timeout", 30)
        self.use_proxy = self.module_params.get("use_proxy", True)
        self.req_session = req_session  # whether __enter__ should create a session
        self.session_id = None
        self.protocol = 'https'
        self.root_uri = '/redfish/v1/'
        self._headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}

    def _get_base_url(self):
        """Return the scheme+host prefix, e.g. 'https://host'."""
        return '{0}://{1}'.format(self.protocol, self.hostname)

    def _build_url(self, path, query_param=None):
        """Return the absolute url for *path* with *query_param* urlencoded."""
        url = path
        base_uri = self._get_base_url()
        if path:
            url = base_uri + path
        if query_param:
            url += "?{0}".format(urlencode(query_param))
        return url

    def _url_common_args_spec(self, method, api_timeout, headers=None):
        """Build the keyword arguments shared by all open_url calls.

        NOTE(review): req_header aliases self._headers, so any *headers*
        passed here are merged into the instance and persist for every later
        request on this object — confirm this carry-over is intended.
        """
        req_header = self._headers
        if headers:
            req_header.update(headers)
        if api_timeout is None:
            api_timeout = self.timeout
        if self.ca_path is None:
            # Fall back to the CA bundle environment variables when no
            # explicit ca_path was supplied.
            self.ca_path = self._get_omam_ca_env()
        url_kwargs = {
            "method": method,
            "validate_certs": self.validate_certs,
            "ca_path": self.ca_path,
            "use_proxy": self.use_proxy,
            "headers": req_header,
            "timeout": api_timeout,
            "follow_redirects": 'all',
        }
        return url_kwargs

    def _args_without_session(self, path, method, api_timeout, headers=None):
        """Build open_url arguments for basic authentication.

        Credentials are omitted for the session-creation POST itself, which
        authenticates through its JSON payload instead.
        """
        req_header = self._headers
        if headers:
            req_header.update(headers)
        url_kwargs = self._url_common_args_spec(method, api_timeout, headers=headers)
        if not (path == SESSION_RESOURCE_COLLECTION["SESSION"] and method == 'POST'):
            url_kwargs["url_username"] = self.username
            url_kwargs["url_password"] = self.password
            url_kwargs["force_basic_auth"] = True
        return url_kwargs

    def _args_with_session(self, method, api_timeout, headers=None):
        """Build open_url arguments for session (X-Auth-Token) authentication."""
        url_kwargs = self._url_common_args_spec(method, api_timeout, headers=headers)
        url_kwargs["force_basic_auth"] = False
        return url_kwargs

    def invoke_request(self, method, path, data=None, query_param=None, headers=None,
                       api_timeout=None, dump=True):
        """
        Sends a request through open_url
        Returns :class:`OpenURLResponse` object.
        :arg method: HTTP verb to use for the request
        :arg path: path to request without query parameter
        :arg data: (optional) Payload to send with the request
        :arg query_param: (optional) Dictionary of query parameter to send with request
        :arg headers: (optional) Dictionary of HTTP Headers to send with the
            request
        :arg api_timeout: (optional) How long to wait for the server to send
            data before giving up
        :arg dump: (Optional) boolean value for dumping payload data.
        :returns: OpenURLResponse
        """
        try:
            # Prefer session auth once an X-Auth-Token has been acquired.
            if 'X-Auth-Token' in self._headers:
                url_kwargs = self._args_with_session(method, api_timeout, headers=headers)
            else:
                url_kwargs = self._args_without_session(path, method, api_timeout, headers=headers)
            if data and dump:
                data = json.dumps(data)
            url = self._build_url(path, query_param=query_param)
            resp = open_url(url, data=data, **url_kwargs)
            resp_data = OpenURLResponse(resp)
        except (HTTPError, URLError, SSLValidationError, ConnectionError) as err:
            raise err
        return resp_data

    def __enter__(self):
        """Create a Redfish session (when req_session) and store its token."""
        if self.req_session:
            payload = {'UserName': self.username,
                       'Password': self.password}
            path = SESSION_RESOURCE_COLLECTION["SESSION"]
            resp = self.invoke_request('POST', path, data=payload)
            if resp and resp.success:
                self.session_id = resp.json_data.get("Id")
                self._headers["X-Auth-Token"] = resp.headers.get('X-Auth-Token')
            else:
                msg = "Could not create the session"
                raise ConnectionError(msg)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        """Delete the session created in __enter__, if any; never suppresses
        exceptions (returns False)."""
        if self.session_id:
            path = SESSION_RESOURCE_COLLECTION["SESSION_ID"].format(Id=self.session_id)
            self.invoke_request('DELETE', path)
        return False

    def strip_substr_dict(self, odata_dict, chkstr='@odata.'):
        """Remove keys containing *chkstr* (matched against the lowercased
        key) from *odata_dict* in place and return the same dict."""
        cp = odata_dict.copy()
        klist = cp.keys()
        for k in klist:
            if chkstr in str(k).lower():
                odata_dict.pop(k)
        return odata_dict

    def _get_omam_ca_env(self):
        """Check if the value is set in REQUESTS_CA_BUNDLE or CURL_CA_BUNDLE or OMAM_CA_BUNDLE or returns None"""
        return os.environ.get("REQUESTS_CA_BUNDLE") or os.environ.get("CURL_CA_BUNDLE") or os.environ.get("OMAM_CA_BUNDLE")
diff --git a/ansible_collections/dellemc/openmanage/plugins/module_utils/utils.py b/ansible_collections/dellemc/openmanage/plugins/module_utils/utils.py
new file mode 100644
index 00000000..d0da26e5
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/plugins/module_utils/utils.py
@@ -0,0 +1,350 @@
+# -*- coding: utf-8 -*-
+
+# Dell OpenManage Ansible Modules
+# Version 6.1.0
+# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
# User-facing status messages shared by modules that use these helpers.
CHANGES_MSG = "Changes found to be applied."
NO_CHANGES_MSG = "No changes found to be applied."
RESET_UNTRACK = "iDRAC reset is in progress. Until the iDRAC is reset, the changes would not apply."
RESET_SUCCESS = "iDRAC has been reset successfully."
RESET_FAIL = "Unable to reset the iDRAC. For changes to reflect, manually reset the iDRAC."
# Default Redfish resource ids and endpoints for embedded system/manager.
SYSTEM_ID = "System.Embedded.1"
MANAGER_ID = "iDRAC.Embedded.1"
SYSTEMS_URI = "/redfish/v1/Systems"
MANAGERS_URI = "/redfish/v1/Managers"
IDRAC_RESET_URI = "/redfish/v1/Managers/{res_id}/Actions/Manager.Reset"
SYSTEM_RESET_URI = "/redfish/v1/Systems/{res_id}/Actions/ComputerSystem.Reset"
MANAGER_JOB_URI = "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs?$expand=*($levels=1)"
MANAGER_JOB_ID_URI = "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/{0}"
+
+
+import time
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+
+
def strip_substr_dict(odata_dict, chkstr='@odata.', case_sensitive=False):
    '''
    Remove keys that contain *chkstr* from *odata_dict* in place.

    :param odata_dict: the dict to be stripped of unwanted keys (mutated)
    :param chkstr: the substring to be checked among the keys
    :param case_sensitive: should the match be case sensitive or not
    :return: the same dict, with matching keys removed
    '''
    needle = chkstr if case_sensitive else chkstr.lower()
    for key in list(odata_dict):
        haystack = key if case_sensitive else str(key).lower()
        if needle in haystack:
            odata_dict.pop(key, None)
    return odata_dict
+
+
def job_tracking(rest_obj, job_uri, max_job_wait_sec=600, job_state_var=('LastRunStatus', 'Id'),
                 job_complete_states=(2060, 2020, 2090), job_fail_states=(2070, 2101, 2102, 2103),
                 job_running_states=(2050, 2040, 2030, 2100),
                 sleep_interval_secs=10, max_unresponsive_wait=30, initial_wait=1):
    '''
    Poll *job_uri* until the job reaches a complete or failed state, or the
    wait budget (max_job_wait_sec) is exhausted.

    :param rest_obj: the rest_obj either of the below
    ansible_collections.dellemc.openmanage.plugins.module_utils.ome.RestOME
    :param job_uri: the uri to fetch the job response dict
    :param max_job_wait_sec: max time the job will wait
    :param job_state_var: the nested dict traversal path to the state value
    :param job_complete_states: states treated as successful completion
    :param job_fail_states: states treated as failure
    :param job_running_states: states that keep the loop sleeping
    :param sleep_interval_secs: delay between successive polls
    :param max_unresponsive_wait: budget for tolerating request exceptions
    :param initial_wait: delay before the first poll
    :return: tuple (job_failed, message, last job dict, total wait time)
    '''
    # ome_job_status_map = {
    # 2020: "Scheduled", 2030: "Queued", 2040: "Starting", 2050: "Running", 2060: "completed successfully",
    # 2070: "Failed", 2090: "completed with errors", 2080: "New", 2100: "Aborted", 2101: "Paused", 2102: "Stopped",
    # 2103: "Canceled"
    # }
    # ensure job states are mutually exclusive
    max_retries = max_job_wait_sec // sleep_interval_secs
    unresp = max_unresponsive_wait // sleep_interval_secs
    loop_ctr = 0
    job_failed = True
    job_dict = {}
    wait_time = 0
    if set(job_complete_states) & set(job_fail_states):
        return job_failed, "Overlapping job states found.", job_dict, wait_time
    msg = "Job tracking started."
    time.sleep(initial_wait)
    while loop_ctr < max_retries:
        loop_ctr += 1
        try:
            job_resp = rest_obj.invoke_request('GET', job_uri)
            job_dict = job_resp.json_data
            # Walk the nested path (e.g. LastRunStatus -> Id) to the state value.
            job_status = job_dict
            for x in job_state_var:
                job_status = job_status.get(x, {})
            if job_status in job_complete_states:
                job_failed = False
                msg = "Job tracking completed."
                loop_ctr = max_retries  # force loop exit
            elif job_status in job_fail_states:
                job_failed = True
                msg = "Job is in Failed state."
                loop_ctr = max_retries  # force loop exit
            # Sleep only while the job is (possibly) still running; when no
            # running-state list is supplied, always sleep between polls.
            if job_running_states:
                if job_status in job_running_states:
                    time.sleep(sleep_interval_secs)
                    wait_time = wait_time + sleep_interval_secs
            else:
                time.sleep(sleep_interval_secs)
                wait_time = wait_time + sleep_interval_secs
        except Exception as err:
            # Tolerate transient request failures until the unresponsive
            # budget runs out, then abort with the exception message.
            if unresp:
                time.sleep(sleep_interval_secs)
                wait_time = wait_time + sleep_interval_secs
            else:
                job_failed = True
                msg = "Exception in job tracking " + str(err)
                break
        # NOTE(review): unresp is decremented on every iteration, not only on
        # exceptions, so a late exception aborts sooner than
        # max_unresponsive_wait suggests — confirm this is intended.
        unresp = unresp - 1
    return job_failed, msg, job_dict, wait_time
+
+
def idrac_redfish_job_tracking(
        rest_obj, job_uri, max_job_wait_sec=600, job_state_var='JobState',
        job_complete_states=("Completed", "Downloaded", "CompletedWithErrors", "RebootCompleted"),
        job_fail_states=("Failed", "RebootFailed", "Unknown"),
        job_running_states=("Running", "RebootPending", "Scheduling", "Scheduled", "Downloading", "Waiting", "Paused",
                            "New", "PendingActivation", "ReadyForExecution"),
        sleep_interval_secs=10, max_unresponsive_wait=30, initial_wait=1):
    """Poll an iDRAC Redfish job at *job_uri* until it completes, fails, or
    the wait budget (max_job_wait_sec) is exhausted.

    :return: tuple (job_failed, message, last job dict, total wait time)
    """
    # idrac_redfish_job_sates = [ "New", "Scheduled", "Running", "Completed", "Downloading", "Downloaded",
    # "Scheduling", "ReadyForExecution", "Waiting", "Paused", "Failed", "CompletedWithErrors", "RebootPending",
    # "RebootFailed", "RebootCompleted", "PendingActivation", "Unknown"]
    max_retries = max_job_wait_sec // sleep_interval_secs
    unresp = max_unresponsive_wait // sleep_interval_secs
    loop_ctr = 0
    job_failed = True
    job_dict = {}
    wait_time = 0
    if set(job_complete_states) & set(job_fail_states):
        return job_failed, "Overlapping job states found.", job_dict, wait_time
    msg = "Job tracking started."
    time.sleep(initial_wait)
    while loop_ctr < max_retries:
        loop_ctr += 1
        try:
            # iDRAC-style client: (uri, method) argument order, unlike the OME client.
            job_resp = rest_obj.invoke_request(job_uri, 'GET')
            job_dict = job_resp.json_data
            job_status = job_dict
            job_status = job_status.get(job_state_var, "Unknown")
            if job_status in job_running_states:
                time.sleep(sleep_interval_secs)
                wait_time = wait_time + sleep_interval_secs
            elif job_status in job_complete_states:
                job_failed = False
                msg = "Job tracking completed."
                loop_ctr = max_retries  # force loop exit
            elif job_status in job_fail_states:
                job_failed = True
                msg = "Job is in {0} state.".format(job_status)
                loop_ctr = max_retries  # force loop exit
            else:  # unrecognised states, just wait
                time.sleep(sleep_interval_secs)
                wait_time = wait_time + sleep_interval_secs
        except Exception as err:
            # Tolerate transient request failures until the unresponsive
            # budget runs out, then abort with the exception message.
            if unresp:
                time.sleep(sleep_interval_secs)
                wait_time = wait_time + sleep_interval_secs
            else:
                job_failed = True
                msg = "Exception in job tracking " + str(err)
                break
        # NOTE(review): unresp is decremented on every iteration, not only on
        # exceptions — confirm this is intended (same pattern as job_tracking).
        unresp = unresp - 1
    return job_failed, msg, job_dict, wait_time
+
+
def get_rest_items(rest_obj, uri="DeviceService/Devices", key="Id", value="Identifier", selector="value"):
    """Build a {key: value} lookup from a paginated OME collection.

    :param rest_obj: REST client exposing get_all_items_with_pagination
    :return: dict mapping item[key] to item[value] ({} when nothing found)
    """
    resp = rest_obj.get_all_items_with_pagination(uri)
    items = resp.get(selector)
    if not items:
        return {}
    return {item.get(key): item.get(value) for item in items}
+
+
def get_item_and_list(rest_obj, name, uri, key='Name', value='value'):
    """Fetch a collection and pick the entry whose *key* equals *name*.

    :return: tuple (matched item or {}, full list fetched)
    """
    resp = rest_obj.invoke_request('GET', uri)
    items = []
    if resp.success and resp.json_data.get(value):
        items = resp.json_data.get(value, [])
        match = next((entry for entry in items if entry.get(key, "") == name), None)
        if match is not None:
            return match, items
    return {}, items
+
+
def apply_diff_key(src, dest, klist):
    """Copy differing, non-None values of the *klist* keys from *src* into *dest*.

    :return: number of keys updated in *dest*
    """
    changed = 0
    for key in klist:
        candidate = src.get(key)
        if candidate is not None and candidate != dest.get(key):
            dest[key] = candidate
            changed += 1
    return changed
+
+
def wait_for_job_completion(redfish_obj, uri, job_wait=True, wait_timeout=120, sleep_time=10):
    """Poll *uri* until PercentComplete reaches 100 or *wait_timeout* elapses.

    When job_wait is False a single snapshot of the job is returned instead.
    :return: tuple (job response or {}, error message or "")
    """
    if not job_wait:
        snapshot = redfish_obj.invoke_request("GET", uri)
        time.sleep(10)
        return snapshot, ""
    remaining = wait_timeout
    interval = sleep_time
    while remaining:
        if remaining > interval:
            remaining -= interval
        else:
            # last (possibly partial) interval
            interval = remaining
            remaining = 0
        time.sleep(interval)
        job_resp = redfish_obj.invoke_request("GET", uri)
        if job_resp.json_data.get("PercentComplete") == 100:
            time.sleep(10)
            return job_resp, ""
    return {}, "The job is not complete after {0} seconds.".format(wait_timeout)
+
+
def wait_after_idrac_reset(idrac, wait_time_sec, interval=30):
    """Wait for the iDRAC to come back online after a reset.

    Polls MANAGERS_URI until a request succeeds or *wait_time_sec* elapses.
    :return: tuple (track_failed, message)
    """
    time.sleep(interval // 2)
    track_failed, msg = True, RESET_UNTRACK
    remaining = wait_time_sec
    while remaining > 0:
        try:
            idrac.invoke_request(MANAGERS_URI, 'GET')
        except Exception:
            # iDRAC still unreachable; burn an interval and retry.
            time.sleep(interval)
            remaining = remaining - interval
        else:
            time.sleep(interval // 2)
            track_failed, msg = False, RESET_SUCCESS
            break
    return track_failed, msg
+
+
+# Can this be in idrac_redfish???
+def reset_idrac(idrac_restobj, wait_time_sec=300, res_id=MANAGER_ID, interval=30):
+ track_failed = True
+ reset_msg = "iDRAC reset triggered successfully."
+ try:
+ resp = idrac_restobj.invoke_request(IDRAC_RESET_URI.format(res_id=res_id), 'POST',
+ data={"ResetType": "GracefulRestart"})
+ if wait_time_sec:
+ track_failed, reset_msg = wait_after_idrac_reset(idrac_restobj, wait_time_sec, interval)
+ reset = True
+ except Exception:
+ reset = False
+ reset_msg = RESET_FAIL
+ return reset, track_failed, reset_msg
+
+
def get_manager_res_id(idrac):
    """Resolve the manager resource id from the Managers collection.

    Falls back to the default MANAGER_ID when the collection cannot be fetched.
    """
    try:
        resp = idrac.invoke_request(MANAGERS_URI, "GET")
        members = resp.json_data.get("Members")
        manager_uri = members[0].get('@odata.id')
        return manager_uri.split("/")[-1]
    except HTTPError:
        return MANAGER_ID
+
+
def wait_for_idrac_job_completion(idrac, uri, job_wait=True, wait_timeout=120, sleep_time=10):
    """Poll an iDRAC job until it completes, fails to reboot, or times out.

    When job_wait is False a single snapshot of the job is returned instead.
    :return: tuple (job response or {}, error message or "")
    """
    timeout_msg = "The job is not complete after {0} seconds.".format(wait_timeout)
    if not job_wait:
        # iDRAC-style client: (uri, method) argument order.
        snapshot = idrac.invoke_request(uri, "GET")
        time.sleep(10)
        return snapshot, ""
    remaining = wait_timeout
    interval = sleep_time
    while remaining:
        if remaining > interval:
            remaining -= interval
        else:
            # last (possibly partial) interval
            interval = remaining
            remaining = 0
        time.sleep(interval)
        job_resp = idrac.invoke_request(uri, "GET")
        if job_resp.json_data.get("PercentComplete") == 100:
            time.sleep(10)
            return job_resp, ""
        if job_resp.json_data.get("JobState") == "RebootFailed":
            time.sleep(10)
            return job_resp, timeout_msg
    return {}, "The job is not complete after {0} seconds.".format(wait_timeout)
+
+
def idrac_system_reset(idrac, res_id, payload=None, job_wait=True, wait_time_sec=300, interval=30):
    """Trigger a ComputerSystem.Reset on *res_id* and optionally track the
    resulting RebootPending job.

    :param idrac: iDRAC Redfish client (invoke_request(uri, method, ...))
    :param res_id: system resource id used in SYSTEM_RESET_URI
    :param payload: reset payload, e.g. {"ResetType": ...}
    :return: tuple (reset, track_failed, reset_msg, job_resp)
    """
    # NOTE(review): track_failed is initialized True and never updated in this
    # function — confirm callers expect that.
    track_failed, reset, job_resp = True, False, {}
    reset_msg = RESET_UNTRACK
    try:
        idrac.invoke_request(SYSTEM_RESET_URI.format(res_id=res_id), 'POST', data=payload)
        time.sleep(10)
        if wait_time_sec:
            # Look for a freshly created RebootPending job to track.
            resp = idrac.invoke_request(MANAGER_JOB_URI, "GET")
            job = list(filter(lambda d: d["JobState"] in ["RebootPending"], resp.json_data["Members"]))
            if job:
                job_resp, msg = wait_for_idrac_job_completion(idrac, MANAGER_JOB_ID_URI.format(job[0]["Id"]),
                                                              job_wait=job_wait, wait_timeout=wait_time_sec)
                if "job is not complete" in msg:
                    reset, reset_msg = False, msg
                # Empty msg means the job finished; reset_msg stays RESET_UNTRACK
                # in that case (only the reset flag flips to True).
                if not msg:
                    reset = True
    except Exception:
        reset = False
        reset_msg = RESET_FAIL
    return reset, track_failed, reset_msg, job_resp
+
+
def get_system_res_id(idrac):
    """Resolve the system resource id from the Systems collection.

    :return: tuple (resource id, error message); on HTTPError the default
        SYSTEM_ID is returned along with an explanatory message.
    """
    try:
        resp = idrac.invoke_request(SYSTEMS_URI, "GET")
    except HTTPError:
        return SYSTEM_ID, ("Unable to complete the request because the resource URI "
                           "does not exist or is not implemented.")
    members = resp.json_data.get("Members")
    system_uri = members[0].get('@odata.id')
    return system_uri.split("/")[-1], ""