Diffstat (limited to 'ansible_collections/community/general/plugins')
39 files changed, 2165 insertions, 166 deletions
diff --git a/ansible_collections/community/general/plugins/callback/opentelemetry.py b/ansible_collections/community/general/plugins/callback/opentelemetry.py index 58cfa057b..c6e8a87c1 100644 --- a/ansible_collections/community/general/plugins/callback/opentelemetry.py +++ b/ansible_collections/community/general/plugins/callback/opentelemetry.py @@ -556,11 +556,19 @@ class CallbackModule(CallbackBase): self.otel_exporter_otlp_traces_protocol = self.get_option('otel_exporter_otlp_traces_protocol') - def dump_results(self, result): + def dump_results(self, task, result): """ dump the results if disable_logs is not enabled """ if self.disable_logs: return "" - return self._dump_results(result._result) + # ansible.builtin.uri contains the response in the json field + save = dict(result._result) + + if "json" in save and task.action in ("ansible.builtin.uri", "ansible.legacy.uri", "uri"): + save.pop("json") + # ansible.builtin.slurp contains the response in the content field + if "content" in save and task.action in ("ansible.builtin.slurp", "ansible.legacy.slurp", "slurp"): + save.pop("content") + return self._dump_results(save) def v2_playbook_on_start(self, playbook): self.ansible_playbook = basename(playbook._file_name) @@ -611,7 +619,7 @@ class CallbackModule(CallbackBase): self.tasks_data, status, result, - self.dump_results(result) + self.dump_results(self.tasks_data[result._task._uuid], result) ) def v2_runner_on_ok(self, result): @@ -619,7 +627,7 @@ class CallbackModule(CallbackBase): self.tasks_data, 'ok', result, - self.dump_results(result) + self.dump_results(self.tasks_data[result._task._uuid], result) ) def v2_runner_on_skipped(self, result): @@ -627,7 +635,7 @@ class CallbackModule(CallbackBase): self.tasks_data, 'skipped', result, - self.dump_results(result) + self.dump_results(self.tasks_data[result._task._uuid], result) ) def v2_playbook_on_include(self, included_file): diff --git a/ansible_collections/community/general/plugins/doc_fragments/django.py b/ansible_collections/community/general/plugins/doc_fragments/django.py index d92799937..f89ec9144 100644 --- a/ansible_collections/community/general/plugins/doc_fragments/django.py +++ b/ansible_collections/community/general/plugins/doc_fragments/django.py @@ -51,3 +51,12 @@ seealso: Please make sure that you select the right version of Django in the version selector on that page. link: https://docs.djangoproject.com/en/5.0/ref/django-admin/ ''' + + DATABASE = r''' +options: + database: + description: + - Specify the database to be used. + type: str + default: default +''' diff --git a/ansible_collections/community/general/plugins/doc_fragments/proxmox.py b/ansible_collections/community/general/plugins/doc_fragments/proxmox.py index cb533fefa..239dba06d 100644 --- a/ansible_collections/community/general/plugins/doc_fragments/proxmox.py +++ b/ansible_collections/community/general/plugins/doc_fragments/proxmox.py @@ -16,6 +16,13 @@ options: - Specify the target host of the Proxmox VE cluster. type: str required: true + api_port: + description: + - Specify the target port of the Proxmox VE cluster. + - Uses the E(PROXMOX_PORT) environment variable if not specified. + type: int + required: false + version_added: 9.1.0 api_user: description: - Specify the user to authenticate with. 
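For reference, the payload trimming that the new dump_results() in the opentelemetry callback performs can be shown as a minimal standalone sketch; plain strings and dicts stand in for the Ansible task and result objects the callback actually receives:

import json

def sanitize_result(action, result):
    # Work on a copy so the original task result stays intact.
    save = dict(result)
    # ansible.builtin.uri places the parsed response body in the "json" field.
    if "json" in save and action in ("ansible.builtin.uri", "ansible.legacy.uri", "uri"):
        save.pop("json")
    # ansible.builtin.slurp places the file contents in the "content" field.
    if "content" in save and action in ("ansible.builtin.slurp", "ansible.legacy.slurp", "slurp"):
        save.pop("content")
    return save

print(json.dumps(sanitize_result("ansible.builtin.uri", {"status": 200, "json": {"large": "body"}})))
# {"status": 200}

In the callback itself the task is looked up as self.tasks_data[result._task._uuid], which is why the v2_runner_* hooks shown above now pass it into dump_results().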
diff --git a/ansible_collections/community/general/plugins/filter/keep_keys.py b/ansible_collections/community/general/plugins/filter/keep_keys.py new file mode 100644 index 000000000..dffccba35 --- /dev/null +++ b/ansible_collections/community/general/plugins/filter/keep_keys.py @@ -0,0 +1,138 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2024 Vladimir Botka <vbotka@gmail.com> +# Copyright (c) 2024 Felix Fontein <felix@fontein.de> +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = ''' + name: keep_keys + short_description: Keep specific keys from dictionaries in a list + version_added: "9.1.0" + author: + - Vladimir Botka (@vbotka) + - Felix Fontein (@felixfontein) + description: This filter keeps only specified keys from a provided list of dictionaries. + options: + _input: + description: + - A list of dictionaries. + - Top level keys must be strings. + type: list + elements: dictionary + required: true + target: + description: + - A single key or key pattern to keep, or a list of keys or keys patterns to keep. + - If O(matching_parameter=regex) there must be exactly one pattern provided. + type: raw + required: true + matching_parameter: + description: Specify the matching option of target keys. + type: str + default: equal + choices: + equal: Matches keys of exactly one of the O(target) items. + starts_with: Matches keys that start with one of the O(target) items. + ends_with: Matches keys that end with one of the O(target) items. + regex: + - Matches keys that match the regular expresion provided in O(target). + - In this case, O(target) must be a regex string or a list with single regex string. +''' + +EXAMPLES = ''' + l: + - {k0_x0: A0, k1_x1: B0, k2_x2: [C0], k3_x3: foo} + - {k0_x0: A1, k1_x1: B1, k2_x2: [C1], k3_x3: bar} + + # 1) By default match keys that equal any of the items in the target. + t: [k0_x0, k1_x1] + r: "{{ l | community.general.keep_keys(target=t) }}" + + # 2) Match keys that start with any of the items in the target. + t: [k0, k1] + r: "{{ l | community.general.keep_keys(target=t, matching_parameter='starts_with') }}" + + # 3) Match keys that end with any of the items in target. + t: [x0, x1] + r: "{{ l | community.general.keep_keys(target=t, matching_parameter='ends_with') }}" + + # 4) Match keys by the regex. + t: ['^.*[01]_x.*$'] + r: "{{ l | community.general.keep_keys(target=t, matching_parameter='regex') }}" + + # 5) Match keys by the regex. + t: '^.*[01]_x.*$' + r: "{{ l | community.general.keep_keys(target=t, matching_parameter='regex') }}" + + # The results of above examples 1-5 are all the same. + r: + - {k0_x0: A0, k1_x1: B0} + - {k0_x0: A1, k1_x1: B1} + + # 6) By default match keys that equal the target. + t: k0_x0 + r: "{{ l | community.general.keep_keys(target=t) }}" + + # 7) Match keys that start with the target. + t: k0 + r: "{{ l | community.general.keep_keys(target=t, matching_parameter='starts_with') }}" + + # 8) Match keys that end with the target. + t: x0 + r: "{{ l | community.general.keep_keys(target=t, matching_parameter='ends_with') }}" + + # 9) Match keys by the regex. + t: '^.*0_x.*$' + r: "{{ l | community.general.keep_keys(target=t, matching_parameter='regex') }}" + + # The results of above examples 6-9 are all the same. 
+ r: + - {k0_x0: A0} + - {k0_x0: A1} +''' + +RETURN = ''' + _value: + description: The list of dictionaries with selected keys. + type: list + elements: dictionary +''' + +from ansible_collections.community.general.plugins.plugin_utils.keys_filter import ( + _keys_filter_params, + _keys_filter_target_str) + + +def keep_keys(data, target=None, matching_parameter='equal'): + """keep specific keys from dictionaries in a list""" + + # test parameters + _keys_filter_params(data, matching_parameter) + # test and transform target + tt = _keys_filter_target_str(target, matching_parameter) + + if matching_parameter == 'equal': + def keep_key(key): + return key in tt + elif matching_parameter == 'starts_with': + def keep_key(key): + return key.startswith(tt) + elif matching_parameter == 'ends_with': + def keep_key(key): + return key.endswith(tt) + elif matching_parameter == 'regex': + def keep_key(key): + return tt.match(key) is not None + + return [dict((k, v) for k, v in d.items() if keep_key(k)) for d in data] + + +class FilterModule(object): + + def filters(self): + return { + 'keep_keys': keep_keys, + } diff --git a/ansible_collections/community/general/plugins/filter/lists_mergeby.py b/ansible_collections/community/general/plugins/filter/lists_mergeby.py index caf183492..0e47d5017 100644 --- a/ansible_collections/community/general/plugins/filter/lists_mergeby.py +++ b/ansible_collections/community/general/plugins/filter/lists_mergeby.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2020-2022, Vladimir Botka <vbotka@gmail.com> +# Copyright (c) 2020-2024, Vladimir Botka <vbotka@gmail.com> # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later @@ -12,22 +12,32 @@ DOCUMENTATION = ''' version_added: 2.0.0 author: Vladimir Botka (@vbotka) description: - - Merge two or more lists by attribute O(index). Optional parameters O(recursive) and O(list_merge) - control the merging of the lists in values. The function merge_hash from ansible.utils.vars - is used. To learn details on how to use the parameters O(recursive) and O(list_merge) see - Ansible User's Guide chapter "Using filters to manipulate data" section "Combining - hashes/dictionaries". + - Merge two or more lists by attribute O(index). Optional + parameters O(recursive) and O(list_merge) control the merging of + the nested dictionaries and lists. + - The function C(merge_hash) from C(ansible.utils.vars) is used. + - To learn details on how to use the parameters O(recursive) and + O(list_merge) see Ansible User's Guide chapter "Using filters to + manipulate data" section R(Combining hashes/dictionaries, combine_filter) or the + filter P(ansible.builtin.combine#filter). + positional: another_list, index options: _input: - description: A list of dictionaries. + description: + - A list of dictionaries, or a list of lists of dictionaries. + - The required type of the C(elements) is set to C(raw) + because all elements of O(_input) can be either dictionaries + or lists. type: list - elements: dictionary + elements: raw required: true another_list: - description: Another list of dictionaries. This parameter can be specified multiple times. + description: + - Another list of dictionaries, or a list of lists of dictionaries. + - This parameter can be specified multiple times. 
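Stepping back to the new keep_keys filter above for a moment: its matching modes can be exercised outside Ansible with a small re-implementation. This sketch inlines the target handling that the real filter delegates to plugin_utils/keys_filter (_keys_filter_params and _keys_filter_target_str), so it skips their validation:

import re

def keep_keys_demo(data, target, matching_parameter="equal"):
    # One predicate per matching mode, mirroring the dispatch in keep_keys().
    predicates = {
        "equal": lambda key: key in target,
        "starts_with": lambda key: key.startswith(tuple(target)),
        "ends_with": lambda key: key.endswith(tuple(target)),
        "regex": lambda key: re.match(target[0], key) is not None,
    }
    keep = predicates[matching_parameter]
    return [{k: v for k, v in d.items() if keep(k)} for d in data]

data = [{"k0_x0": "A0", "k1_x1": "B0", "k2_x2": ["C0"]}]
print(keep_keys_demo(data, ["k0_x0", "k1_x1"]))        # [{'k0_x0': 'A0', 'k1_x1': 'B0'}]
print(keep_keys_demo(data, ["x2"], "ends_with"))       # [{'k2_x2': ['C0']}]
print(keep_keys_demo(data, ["^.*1_x.*$"], "regex"))    # [{'k1_x1': 'B0'}]

The remove_keys filter further below is the exact complement (the same predicates, negated), and both share their parameter validation with replace_keys through the keys_filter plugin utils.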
type: list - elements: dictionary + elements: raw index: description: - The dictionary key that must be present in every dictionary in every list that is used to @@ -55,40 +65,134 @@ DOCUMENTATION = ''' ''' EXAMPLES = ''' -- name: Merge two lists +# Some results below are manually formatted for better readability. The +# dictionaries' keys will be sorted alphabetically in real output. + +- name: Example 1. Merge two lists. The results r1 and r2 are the same. + ansible.builtin.debug: + msg: | + r1: {{ r1 }} + r2: {{ r2 }} + vars: + list1: + - {index: a, value: 123} + - {index: b, value: 4} + list2: + - {index: a, foo: bar} + - {index: c, foo: baz} + r1: "{{ list1 | community.general.lists_mergeby(list2, 'index') }}" + r2: "{{ [list1, list2] | community.general.lists_mergeby('index') }}" + +# r1: +# - {index: a, foo: bar, value: 123} +# - {index: b, value: 4} +# - {index: c, foo: baz} +# r2: +# - {index: a, foo: bar, value: 123} +# - {index: b, value: 4} +# - {index: c, foo: baz} + +- name: Example 2. Merge three lists + ansible.builtin.debug: + var: r + vars: + list1: + - {index: a, value: 123} + - {index: b, value: 4} + list2: + - {index: a, foo: bar} + - {index: c, foo: baz} + list3: + - {index: d, foo: qux} + r: "{{ [list1, list2, list3] | community.general.lists_mergeby('index') }}" + +# r: +# - {index: a, foo: bar, value: 123} +# - {index: b, value: 4} +# - {index: c, foo: baz} +# - {index: d, foo: qux} + +- name: Example 3. Merge single list. The result is the same as 2. + ansible.builtin.debug: + var: r + vars: + list1: + - {index: a, value: 123} + - {index: b, value: 4} + - {index: a, foo: bar} + - {index: c, foo: baz} + - {index: d, foo: qux} + r: "{{ [list1, []] | community.general.lists_mergeby('index') }}" + +# r: +# - {index: a, foo: bar, value: 123} +# - {index: b, value: 4} +# - {index: c, foo: baz} +# - {index: d, foo: qux} + +- name: Example 4. Merge two lists. By default, replace nested lists. + ansible.builtin.debug: + var: r + vars: + list1: + - {index: a, foo: [X1, X2]} + - {index: b, foo: [X1, X2]} + list2: + - {index: a, foo: [Y1, Y2]} + - {index: b, foo: [Y1, Y2]} + r: "{{ [list1, list2] | community.general.lists_mergeby('index') }}" + +# r: +# - {index: a, foo: [Y1, Y2]} +# - {index: b, foo: [Y1, Y2]} + +- name: Example 5. Merge two lists. Append nested lists. + ansible.builtin.debug: + var: r + vars: + list1: + - {index: a, foo: [X1, X2]} + - {index: b, foo: [X1, X2]} + list2: + - {index: a, foo: [Y1, Y2]} + - {index: b, foo: [Y1, Y2]} + r: "{{ [list1, list2] | community.general.lists_mergeby('index', list_merge='append') }}" + +# r: +# - {index: a, foo: [X1, X2, Y1, Y2]} +# - {index: b, foo: [X1, X2, Y1, Y2]} + +- name: Example 6. Merge two lists. By default, do not merge nested dictionaries. + ansible.builtin.debug: + var: r + vars: + list1: + - {index: a, foo: {x: 1, y: 2}} + - {index: b, foo: [X1, X2]} + list2: + - {index: a, foo: {y: 3, z: 4}} + - {index: b, foo: [Y1, Y2]} + r: "{{ [list1, list2] | community.general.lists_mergeby('index') }}" + +# r: +# - {index: a, foo: {y: 3, z: 4}} +# - {index: b, foo: [Y1, Y2]} + +- name: Example 7. Merge two lists. Merge nested dictionaries too. 
ansible.builtin.debug: - msg: >- - {{ list1 | community.general.lists_mergeby( - list2, - 'index', - recursive=True, - list_merge='append' - ) }}" + var: r vars: list1: - - index: a - value: 123 - - index: b - value: 42 + - {index: a, foo: {x: 1, y: 2}} + - {index: b, foo: [X1, X2]} list2: - - index: a - foo: bar - - index: c - foo: baz - # Produces the following list of dictionaries: - # { - # "index": "a", - # "foo": "bar", - # "value": 123 - # }, - # { - # "index": "b", - # "value": 42 - # }, - # { - # "index": "c", - # "foo": "baz" - # } + - {index: a, foo: {y: 3, z: 4}} + - {index: b, foo: [Y1, Y2]} + r: "{{ [list1, list2] | community.general.lists_mergeby('index', recursive=true) }}" + +# r: +# - {index: a, foo: {x:1, y: 3, z: 4}} +# - {index: b, foo: [Y1, Y2]} ''' RETURN = ''' @@ -108,13 +212,14 @@ from operator import itemgetter def list_mergeby(x, y, index, recursive=False, list_merge='replace'): - ''' Merge 2 lists by attribute 'index'. The function merge_hash from ansible.utils.vars is used. - This function is used by the function lists_mergeby. + '''Merge 2 lists by attribute 'index'. The function 'merge_hash' + from ansible.utils.vars is used. This function is used by the + function lists_mergeby. ''' d = defaultdict(dict) - for l in (x, y): - for elem in l: + for lst in (x, y): + for elem in lst: if not isinstance(elem, Mapping): msg = "Elements of list arguments for lists_mergeby must be dictionaries. %s is %s" raise AnsibleFilterError(msg % (elem, type(elem))) @@ -124,20 +229,9 @@ def list_mergeby(x, y, index, recursive=False, list_merge='replace'): def lists_mergeby(*terms, **kwargs): - ''' Merge 2 or more lists by attribute 'index'. Optional parameters 'recursive' and 'list_merge' - control the merging of the lists in values. The function merge_hash from ansible.utils.vars - is used. To learn details on how to use the parameters 'recursive' and 'list_merge' see - Ansible User's Guide chapter "Using filters to manipulate data" section "Combining - hashes/dictionaries". - - Example: - - debug: - msg: "{{ list1| - community.general.lists_mergeby(list2, - 'index', - recursive=True, - list_merge='append')| - list }}" + '''Merge 2 or more lists by attribute 'index'. To learn details + on how to use the parameters 'recursive' and 'list_merge' see + the filter ansible.builtin.combine. ''' recursive = kwargs.pop('recursive', False) @@ -155,7 +249,7 @@ def lists_mergeby(*terms, **kwargs): "must be lists. 
%s is %s") raise AnsibleFilterError(msg % (sublist, type(sublist))) if len(sublist) > 0: - if all(isinstance(l, Sequence) for l in sublist): + if all(isinstance(lst, Sequence) for lst in sublist): for item in sublist: flat_list.append(item) else: diff --git a/ansible_collections/community/general/plugins/filter/remove_keys.py b/ansible_collections/community/general/plugins/filter/remove_keys.py new file mode 100644 index 000000000..cabce1468 --- /dev/null +++ b/ansible_collections/community/general/plugins/filter/remove_keys.py @@ -0,0 +1,138 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2024 Vladimir Botka <vbotka@gmail.com> +# Copyright (c) 2024 Felix Fontein <felix@fontein.de> +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = ''' + name: remove_keys + short_description: Remove specific keys from dictionaries in a list + version_added: "9.1.0" + author: + - Vladimir Botka (@vbotka) + - Felix Fontein (@felixfontein) + description: This filter removes only specified keys from a provided list of dictionaries. + options: + _input: + description: + - A list of dictionaries. + - Top level keys must be strings. + type: list + elements: dictionary + required: true + target: + description: + - A single key or key pattern to remove, or a list of keys or keys patterns to remove. + - If O(matching_parameter=regex) there must be exactly one pattern provided. + type: raw + required: true + matching_parameter: + description: Specify the matching option of target keys. + type: str + default: equal + choices: + equal: Matches keys of exactly one of the O(target) items. + starts_with: Matches keys that start with one of the O(target) items. + ends_with: Matches keys that end with one of the O(target) items. + regex: + - Matches keys that match the regular expresion provided in O(target). + - In this case, O(target) must be a regex string or a list with single regex string. +''' + +EXAMPLES = ''' + l: + - {k0_x0: A0, k1_x1: B0, k2_x2: [C0], k3_x3: foo} + - {k0_x0: A1, k1_x1: B1, k2_x2: [C1], k3_x3: bar} + + # 1) By default match keys that equal any of the items in the target. + t: [k0_x0, k1_x1] + r: "{{ l | community.general.remove_keys(target=t) }}" + + # 2) Match keys that start with any of the items in the target. + t: [k0, k1] + r: "{{ l | community.general.remove_keys(target=t, matching_parameter='starts_with') }}" + + # 3) Match keys that end with any of the items in target. + t: [x0, x1] + r: "{{ l | community.general.remove_keys(target=t, matching_parameter='ends_with') }}" + + # 4) Match keys by the regex. + t: ['^.*[01]_x.*$'] + r: "{{ l | community.general.remove_keys(target=t, matching_parameter='regex') }}" + + # 5) Match keys by the regex. + t: '^.*[01]_x.*$' + r: "{{ l | community.general.remove_keys(target=t, matching_parameter='regex') }}" + + # The results of above examples 1-5 are all the same. + r: + - {k2_x2: [C0], k3_x3: foo} + - {k2_x2: [C1], k3_x3: bar} + + # 6) By default match keys that equal the target. + t: k0_x0 + r: "{{ l | community.general.remove_keys(target=t) }}" + + # 7) Match keys that start with the target. + t: k0 + r: "{{ l | community.general.remove_keys(target=t, matching_parameter='starts_with') }}" + + # 8) Match keys that end with the target. 
+ t: x0 + r: "{{ l | community.general.remove_keys(target=t, matching_parameter='ends_with') }}" + + # 9) Match keys by the regex. + t: '^.*0_x.*$' + r: "{{ l | community.general.remove_keys(target=t, matching_parameter='regex') }}" + + # The results of above examples 6-9 are all the same. + r: + - {k1_x1: B0, k2_x2: [C0], k3_x3: foo} + - {k1_x1: B1, k2_x2: [C1], k3_x3: bar} +''' + +RETURN = ''' + _value: + description: The list of dictionaries with selected keys removed. + type: list + elements: dictionary +''' + +from ansible_collections.community.general.plugins.plugin_utils.keys_filter import ( + _keys_filter_params, + _keys_filter_target_str) + + +def remove_keys(data, target=None, matching_parameter='equal'): + """remove specific keys from dictionaries in a list""" + + # test parameters + _keys_filter_params(data, matching_parameter) + # test and transform target + tt = _keys_filter_target_str(target, matching_parameter) + + if matching_parameter == 'equal': + def keep_key(key): + return key not in tt + elif matching_parameter == 'starts_with': + def keep_key(key): + return not key.startswith(tt) + elif matching_parameter == 'ends_with': + def keep_key(key): + return not key.endswith(tt) + elif matching_parameter == 'regex': + def keep_key(key): + return tt.match(key) is None + + return [dict((k, v) for k, v in d.items() if keep_key(k)) for d in data] + + +class FilterModule(object): + + def filters(self): + return { + 'remove_keys': remove_keys, + } diff --git a/ansible_collections/community/general/plugins/filter/replace_keys.py b/ansible_collections/community/general/plugins/filter/replace_keys.py new file mode 100644 index 000000000..d3b12c05d --- /dev/null +++ b/ansible_collections/community/general/plugins/filter/replace_keys.py @@ -0,0 +1,180 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2024 Vladimir Botka <vbotka@gmail.com> +# Copyright (c) 2024 Felix Fontein <felix@fontein.de> +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = ''' + name: replace_keys + short_description: Replace specific keys in a list of dictionaries + version_added: "9.1.0" + author: + - Vladimir Botka (@vbotka) + - Felix Fontein (@felixfontein) + description: This filter replaces specified keys in a provided list of dictionaries. + options: + _input: + description: + - A list of dictionaries. + - Top level keys must be strings. + type: list + elements: dictionary + required: true + target: + description: + - A list of dictionaries with attributes C(before) and C(after). + - The value of O(target[].after) replaces key matching O(target[].before). + type: list + elements: dictionary + required: true + suboptions: + before: + description: + - A key or key pattern to change. + - The interpretation of O(target[].before) depends on O(matching_parameter). + - For a key that matches multiple O(target[].before)s, the B(first) matching O(target[].after) will be used. + type: str + after: + description: A matching key change to. + type: str + matching_parameter: + description: Specify the matching option of target keys. + type: str + default: equal + choices: + equal: Matches keys of exactly one of the O(target[].before) items. + starts_with: Matches keys that start with one of the O(target[].before) items. + ends_with: Matches keys that end with one of the O(target[].before) items. 
+ regex: Matches keys that match one of the regular expressions provided in O(target[].before). +''' + +EXAMPLES = ''' + l: + - {k0_x0: A0, k1_x1: B0, k2_x2: [C0], k3_x3: foo} + - {k0_x0: A1, k1_x1: B1, k2_x2: [C1], k3_x3: bar} + + # 1) By default, replace keys that are equal any of the attributes before. + t: + - {before: k0_x0, after: a0} + - {before: k1_x1, after: a1} + r: "{{ l | community.general.replace_keys(target=t) }}" + + # 2) Replace keys that starts with any of the attributes before. + t: + - {before: k0, after: a0} + - {before: k1, after: a1} + r: "{{ l | community.general.replace_keys(target=t, matching_parameter='starts_with') }}" + + # 3) Replace keys that ends with any of the attributes before. + t: + - {before: x0, after: a0} + - {before: x1, after: a1} + r: "{{ l | community.general.replace_keys(target=t, matching_parameter='ends_with') }}" + + # 4) Replace keys that match any regex of the attributes before. + t: + - {before: "^.*0_x.*$", after: a0} + - {before: "^.*1_x.*$", after: a1} + r: "{{ l | community.general.replace_keys(target=t, matching_parameter='regex') }}" + + # The results of above examples 1-4 are all the same. + r: + - {a0: A0, a1: B0, k2_x2: [C0], k3_x3: foo} + - {a0: A1, a1: B1, k2_x2: [C1], k3_x3: bar} + + # 5) If more keys match the same attribute before the last one will be used. + t: + - {before: "^.*_x.*$", after: X} + r: "{{ l | community.general.replace_keys(target=t, matching_parameter='regex') }}" + + # gives + + r: + - X: foo + - X: bar + + # 6) If there are items with equal attribute before the first one will be used. + t: + - {before: "^.*_x.*$", after: X} + - {before: "^.*_x.*$", after: Y} + r: "{{ l | community.general.replace_keys(target=t, matching_parameter='regex') }}" + + # gives + + r: + - X: foo + - X: bar + + # 7) If there are more matches for a key the first one will be used. + l: + - {aaa1: A, bbb1: B, ccc1: C} + - {aaa2: D, bbb2: E, ccc2: F} + t: + - {before: a, after: X} + - {before: aa, after: Y} + r: "{{ l | community.general.replace_keys(target=t, matching_parameter='starts_with') }}" + + # gives + + r: + - {X: A, bbb1: B, ccc1: C} + - {X: D, bbb2: E, ccc2: F} +''' + +RETURN = ''' + _value: + description: The list of dictionaries with replaced keys. 
+ type: list + elements: dictionary +''' + +from ansible_collections.community.general.plugins.plugin_utils.keys_filter import ( + _keys_filter_params, + _keys_filter_target_dict) + + +def replace_keys(data, target=None, matching_parameter='equal'): + """replace specific keys in a list of dictionaries""" + + # test parameters + _keys_filter_params(data, matching_parameter) + # test and transform target + tz = _keys_filter_target_dict(target, matching_parameter) + + if matching_parameter == 'equal': + def replace_key(key): + for b, a in tz: + if key == b: + return a + return key + elif matching_parameter == 'starts_with': + def replace_key(key): + for b, a in tz: + if key.startswith(b): + return a + return key + elif matching_parameter == 'ends_with': + def replace_key(key): + for b, a in tz: + if key.endswith(b): + return a + return key + elif matching_parameter == 'regex': + def replace_key(key): + for b, a in tz: + if b.match(key): + return a + return key + + return [dict((replace_key(k), v) for k, v in d.items()) for d in data] + + +class FilterModule(object): + + def filters(self): + return { + 'replace_keys': replace_keys, + } diff --git a/ansible_collections/community/general/plugins/inventory/opennebula.py b/ansible_collections/community/general/plugins/inventory/opennebula.py index b097307c3..bf81758ef 100644 --- a/ansible_collections/community/general/plugins/inventory/opennebula.py +++ b/ansible_collections/community/general/plugins/inventory/opennebula.py @@ -143,7 +143,8 @@ class InventoryModule(BaseInventoryPlugin, Constructable): nic = [nic] for net in nic: - return net['IP'] + if net.get('IP'): + return net['IP'] return False diff --git a/ansible_collections/community/general/plugins/module_utils/cmd_runner.py b/ansible_collections/community/general/plugins/module_utils/cmd_runner.py index 2bf2b32e8..da4f1b6fc 100644 --- a/ansible_collections/community/general/plugins/module_utils/cmd_runner.py +++ b/ansible_collections/community/general/plugins/module_utils/cmd_runner.py @@ -89,18 +89,31 @@ class FormatError(CmdRunnerException): class _ArgFormat(object): + # DEPRECATION: set default value for ignore_none to True in community.general 12.0.0 def __init__(self, func, ignore_none=None, ignore_missing_value=False): self.func = func self.ignore_none = ignore_none self.ignore_missing_value = ignore_missing_value - def __call__(self, value, ctx_ignore_none): + # DEPRECATION: remove parameter ctx_ignore_none in community.general 12.0.0 + def __call__(self, value, ctx_ignore_none=True): + # DEPRECATION: replace ctx_ignore_none with True in community.general 12.0.0 ignore_none = self.ignore_none if self.ignore_none is not None else ctx_ignore_none if value is None and ignore_none: return [] f = self.func return [str(x) for x in f(value)] + def __str__(self): + return "<ArgFormat: func={0}, ignore_none={1}, ignore_missing_value={2}>".format( + self.func, + self.ignore_none, + self.ignore_missing_value, + ) + + def __repr__(self): + return str(self) + class _Format(object): @staticmethod @@ -114,7 +127,7 @@ class _Format(object): @staticmethod def as_bool_not(args): - return _ArgFormat(lambda value: [] if value else _ensure_list(args), ignore_none=False) + return _Format.as_bool([], args, ignore_none=False) @staticmethod def as_optval(arg, ignore_none=None): @@ -184,6 +197,19 @@ class _Format(object): return func(**v) return wrapper + @staticmethod + def stack(fmt): + @wraps(fmt) + def wrapper(*args, **kwargs): + new_func = fmt(ignore_none=True, *args, **kwargs) + + def stacking(value): 
+ stack = [new_func(v) for v in value if v] + stack = [x for args in stack for x in args] + return stack + return _ArgFormat(stacking, ignore_none=True) + return wrapper + class CmdRunner(object): """ @@ -204,7 +230,11 @@ class CmdRunner(object): self.default_args_order = self._prepare_args_order(default_args_order) if arg_formats is None: arg_formats = {} - self.arg_formats = dict(arg_formats) + self.arg_formats = {} + for fmt_name, fmt in arg_formats.items(): + if not isinstance(fmt, _ArgFormat): + fmt = _Format.as_func(func=fmt, ignore_none=True) + self.arg_formats[fmt_name] = fmt self.check_rc = check_rc self.force_lang = force_lang self.path_prefix = path_prefix @@ -223,7 +253,16 @@ class CmdRunner(object): def binary(self): return self.command[0] - def __call__(self, args_order=None, output_process=None, ignore_value_none=True, check_mode_skip=False, check_mode_return=None, **kwargs): + # remove parameter ignore_value_none in community.general 12.0.0 + def __call__(self, args_order=None, output_process=None, ignore_value_none=None, check_mode_skip=False, check_mode_return=None, **kwargs): + if ignore_value_none is None: + ignore_value_none = True + else: + self.module.deprecate( + "Using ignore_value_none when creating the runner context is now deprecated, " + "and the parameter will be removed in community.general 12.0.0. ", + version="12.0.0", collection_name="community.general" + ) if output_process is None: output_process = _process_as_is if args_order is None: @@ -235,7 +274,7 @@ class CmdRunner(object): return _CmdRunnerContext(runner=self, args_order=args_order, output_process=output_process, - ignore_value_none=ignore_value_none, + ignore_value_none=ignore_value_none, # DEPRECATION: remove in community.general 12.0.0 check_mode_skip=check_mode_skip, check_mode_return=check_mode_return, **kwargs) @@ -251,6 +290,7 @@ class _CmdRunnerContext(object): self.runner = runner self.args_order = tuple(args_order) self.output_process = output_process + # DEPRECATION: parameter ignore_value_none at the context level is deprecated and will be removed in community.general 12.0.0 self.ignore_value_none = ignore_value_none self.check_mode_skip = check_mode_skip self.check_mode_return = check_mode_return @@ -290,6 +330,7 @@ class _CmdRunnerContext(object): value = named_args[arg_name] elif not runner.arg_formats[arg_name].ignore_missing_value: raise MissingArgumentValue(self.args_order, arg_name) + # DEPRECATION: remove parameter ctx_ignore_none in 12.0.0 self.cmd.extend(runner.arg_formats[arg_name](value, ctx_ignore_none=self.ignore_value_none)) except MissingArgumentValue: raise @@ -306,7 +347,7 @@ class _CmdRunnerContext(object): @property def run_info(self): return dict( - ignore_value_none=self.ignore_value_none, + ignore_value_none=self.ignore_value_none, # DEPRECATION: remove in community.general 12.0.0 check_rc=self.check_rc, environ_update=self.environ_update, args_order=self.args_order, diff --git a/ansible_collections/community/general/plugins/module_utils/consul.py b/ansible_collections/community/general/plugins/module_utils/consul.py index 68c1a130b..cd54a105f 100644 --- a/ansible_collections/community/general/plugins/module_utils/consul.py +++ b/ansible_collections/community/general/plugins/module_utils/consul.py @@ -10,6 +10,7 @@ __metaclass__ = type import copy import json +import re from ansible.module_utils.six.moves.urllib import error as urllib_error from ansible.module_utils.six.moves.urllib.parse import urlencode @@ -68,6 +69,25 @@ def camel_case_key(key): return 
"".join(parts) +def validate_check(check): + validate_duration_keys = ['Interval', 'Ttl', 'Timeout'] + validate_tcp_regex = r"(?P<host>.*):(?P<port>(?:[0-9]+))$" + if check.get('Tcp') is not None: + match = re.match(validate_tcp_regex, check['Tcp']) + if not match: + raise Exception('tcp check must be in host:port format') + for duration in validate_duration_keys: + if duration in check and check[duration] is not None: + check[duration] = validate_duration(check[duration]) + + +def validate_duration(duration): + if duration: + if not re.search(r"\d+(?:ns|us|ms|s|m|h)", duration): + duration = "{0}s".format(duration) + return duration + + STATE_PARAMETER = "state" STATE_PRESENT = "present" STATE_ABSENT = "absent" @@ -81,7 +101,7 @@ OPERATION_DELETE = "remove" def _normalize_params(params, arg_spec): final_params = {} for k, v in params.items(): - if k not in arg_spec: # Alias + if k not in arg_spec or v is None: # Alias continue spec = arg_spec[k] if ( @@ -105,9 +125,10 @@ class _ConsulModule: """ api_endpoint = None # type: str - unique_identifier = None # type: str + unique_identifiers = None # type: list result_key = None # type: str create_only_fields = set() + operational_attributes = set() params = {} def __init__(self, module): @@ -119,6 +140,8 @@ class _ConsulModule: if k not in STATE_PARAMETER and k not in AUTH_ARGUMENTS_SPEC } + self.operational_attributes.update({"CreateIndex", "CreateTime", "Hash", "ModifyIndex"}) + def execute(self): obj = self.read_object() @@ -203,14 +226,24 @@ class _ConsulModule: return False def prepare_object(self, existing, obj): - operational_attributes = {"CreateIndex", "CreateTime", "Hash", "ModifyIndex"} existing = { - k: v for k, v in existing.items() if k not in operational_attributes + k: v for k, v in existing.items() if k not in self.operational_attributes } for k, v in obj.items(): existing[k] = v return existing + def id_from_obj(self, obj, camel_case=False): + def key_func(key): + return camel_case_key(key) if camel_case else key + + if self.unique_identifiers: + for identifier in self.unique_identifiers: + identifier = key_func(identifier) + if identifier in obj: + return obj[identifier] + return None + def endpoint_url(self, operation, identifier=None): if operation == OPERATION_CREATE: return self.api_endpoint @@ -219,7 +252,8 @@ class _ConsulModule: raise RuntimeError("invalid arguments passed") def read_object(self): - url = self.endpoint_url(OPERATION_READ, self.params.get(self.unique_identifier)) + identifier = self.id_from_obj(self.params) + url = self.endpoint_url(OPERATION_READ, identifier) try: return self.get(url) except RequestError as e: @@ -233,25 +267,28 @@ class _ConsulModule: if self._module.check_mode: return obj else: - return self.put(self.api_endpoint, data=self.prepare_object({}, obj)) + url = self.endpoint_url(OPERATION_CREATE) + created_obj = self.put(url, data=self.prepare_object({}, obj)) + if created_obj is None: + created_obj = self.read_object() + return created_obj def update_object(self, existing, obj): - url = self.endpoint_url( - OPERATION_UPDATE, existing.get(camel_case_key(self.unique_identifier)) - ) merged_object = self.prepare_object(existing, obj) if self._module.check_mode: return merged_object else: - return self.put(url, data=merged_object) + url = self.endpoint_url(OPERATION_UPDATE, self.id_from_obj(existing, camel_case=True)) + updated_obj = self.put(url, data=merged_object) + if updated_obj is None: + updated_obj = self.read_object() + return updated_obj def delete_object(self, obj): if 
self._module.check_mode: return {} else: - url = self.endpoint_url( - OPERATION_DELETE, obj.get(camel_case_key(self.unique_identifier)) - ) + url = self.endpoint_url(OPERATION_DELETE, self.id_from_obj(obj, camel_case=True)) return self.delete(url) def _request(self, method, url_parts, data=None, params=None): @@ -309,7 +346,9 @@ class _ConsulModule: if 400 <= status < 600: raise RequestError(status, response_data) - return json.loads(response_data) + if response_data: + return json.loads(response_data) + return None def get(self, url_parts, **kwargs): return self._request("GET", url_parts, **kwargs) diff --git a/ansible_collections/community/general/plugins/module_utils/django.py b/ansible_collections/community/general/plugins/module_utils/django.py index fbaf840db..5fb375c6f 100644 --- a/ansible_collections/community/general/plugins/module_utils/django.py +++ b/ansible_collections/community/general/plugins/module_utils/django.py @@ -7,6 +7,7 @@ from __future__ import absolute_import, division, print_function __metaclass__ = type +from ansible.module_utils.common.dict_transformations import dict_merge from ansible_collections.community.general.plugins.module_utils.cmd_runner import cmd_runner_fmt from ansible_collections.community.general.plugins.module_utils.python_runner import PythonRunner from ansible_collections.community.general.plugins.module_utils.module_helper import ModuleHelper @@ -33,6 +34,18 @@ _django_std_arg_fmts = dict( skip_checks=cmd_runner_fmt.as_bool("--skip-checks"), ) +_django_database_args = dict( + database=dict(type="str", default="default"), +) + +_args_menu = dict( + std=(django_std_args, _django_std_arg_fmts), + database=(_django_database_args, {"database": cmd_runner_fmt.as_opt_eq_val("--database")}), + noinput=({}, {"noinput": cmd_runner_fmt.as_fixed("--noinput")}), + dry_run=({}, {"dry_run": cmd_runner_fmt.as_bool("--dry-run")}), + check=({}, {"check": cmd_runner_fmt.as_bool("--check")}), +) + class _DjangoRunner(PythonRunner): def __init__(self, module, arg_formats=None, **kwargs): @@ -55,15 +68,30 @@ class DjangoModuleHelper(ModuleHelper): arg_formats = {} django_admin_arg_order = () use_old_vardict = False + _django_args = [] + _check_mode_arg = "" def __init__(self): - argument_spec = dict(django_std_args) - argument_spec.update(self.module.get("argument_spec", {})) - self.module["argument_spec"] = argument_spec + self.module["argument_spec"], self.arg_formats = self._build_args(self.module.get("argument_spec", {}), + self.arg_formats, + *(["std"] + self._django_args)) super(DjangoModuleHelper, self).__init__(self.module) if self.django_admin_cmd is not None: self.vars.command = self.django_admin_cmd + @staticmethod + def _build_args(arg_spec, arg_format, *names): + res_arg_spec = {} + res_arg_fmts = {} + for name in names: + args, fmts = _args_menu[name] + res_arg_spec = dict_merge(res_arg_spec, args) + res_arg_fmts = dict_merge(res_arg_fmts, fmts) + res_arg_spec = dict_merge(res_arg_spec, arg_spec) + res_arg_fmts = dict_merge(res_arg_fmts, arg_format) + + return res_arg_spec, res_arg_fmts + def __run__(self): runner = _DjangoRunner(self.module, default_args_order=self.django_admin_arg_order, @@ -71,7 +99,10 @@ class DjangoModuleHelper(ModuleHelper): venv=self.vars.venv, check_rc=True) with runner() as ctx: - results = ctx.run() + run_params = self.vars.as_dict() + if self._check_mode_arg: + run_params.update({self._check_mode_arg: self.check_mode}) + results = ctx.run(**run_params) self.vars.stdout = ctx.results_out self.vars.stderr = ctx.results_err 
self.vars.cmd = ctx.cmd diff --git a/ansible_collections/community/general/plugins/module_utils/proxmox.py b/ansible_collections/community/general/plugins/module_utils/proxmox.py index 5fd783d65..05bf1874b 100644 --- a/ansible_collections/community/general/plugins/module_utils/proxmox.py +++ b/ansible_collections/community/general/plugins/module_utils/proxmox.py @@ -29,6 +29,9 @@ def proxmox_auth_argument_spec(): required=True, fallback=(env_fallback, ['PROXMOX_HOST']) ), + api_port=dict(type='int', + fallback=(env_fallback, ['PROXMOX_PORT']) + ), api_user=dict(type='str', required=True, fallback=(env_fallback, ['PROXMOX_USER']) @@ -82,6 +85,7 @@ class ProxmoxAnsible(object): def _connect(self): api_host = self.module.params['api_host'] + api_port = self.module.params['api_port'] api_user = self.module.params['api_user'] api_password = self.module.params['api_password'] api_token_id = self.module.params['api_token_id'] @@ -89,6 +93,10 @@ class ProxmoxAnsible(object): validate_certs = self.module.params['validate_certs'] auth_args = {'user': api_user} + + if api_port: + auth_args['port'] = api_port + if api_password: auth_args['password'] = api_password else: diff --git a/ansible_collections/community/general/plugins/module_utils/redfish_utils.py b/ansible_collections/community/general/plugins/module_utils/redfish_utils.py index 6935573d0..139628bd9 100644 --- a/ansible_collections/community/general/plugins/module_utils/redfish_utils.py +++ b/ansible_collections/community/general/plugins/module_utils/redfish_utils.py @@ -11,6 +11,7 @@ import os import random import string import gzip +import time from io import BytesIO from ansible.module_utils.urls import open_url from ansible.module_utils.common.text.converters import to_native @@ -132,11 +133,13 @@ class RedfishUtils(object): return resp # The following functions are to send GET/POST/PATCH/DELETE requests - def get_request(self, uri, override_headers=None, allow_no_resp=False): + def get_request(self, uri, override_headers=None, allow_no_resp=False, timeout=None): req_headers = dict(GET_HEADERS) if override_headers: req_headers.update(override_headers) username, password, basic_auth = self._auth_params(req_headers) + if timeout is None: + timeout = self.timeout try: # Service root is an unauthenticated resource; remove credentials # in case the caller will be using sessions later. @@ -146,7 +149,7 @@ class RedfishUtils(object): url_username=username, url_password=password, force_basic_auth=basic_auth, validate_certs=False, follow_redirects='all', - use_proxy=True, timeout=self.timeout) + use_proxy=True, timeout=timeout) headers = dict((k.lower(), v) for (k, v) in resp.info().items()) try: if headers.get('content-encoding') == 'gzip' and LooseVersion(ansible_version) < LooseVersion('2.14'): @@ -624,6 +627,24 @@ class RedfishUtils(object): allowable_values = default_values return allowable_values + def check_service_availability(self): + """ + Checks if the service is accessible. + + :return: dict containing the status of the service + """ + + # Get the service root + # Override the timeout since the service root is expected to be readily + # available. 
+ service_root = self.get_request(self.root_uri + self.service_root, timeout=10) + if service_root['ret'] is False: + # Failed, either due to a timeout or HTTP error; not available + return {'ret': True, 'available': False} + + # Successfully accessed the service root; available + return {'ret': True, 'available': True} + def get_logs(self): log_svcs_uri_list = [] list_of_logs = [] @@ -1083,11 +1104,12 @@ class RedfishUtils(object): return self.manage_power(command, self.systems_uri, '#ComputerSystem.Reset') - def manage_manager_power(self, command): + def manage_manager_power(self, command, wait=False, wait_timeout=120): return self.manage_power(command, self.manager_uri, - '#Manager.Reset') + '#Manager.Reset', wait, wait_timeout) - def manage_power(self, command, resource_uri, action_name): + def manage_power(self, command, resource_uri, action_name, wait=False, + wait_timeout=120): key = "Actions" reset_type_values = ['On', 'ForceOff', 'GracefulShutdown', 'GracefulRestart', 'ForceRestart', 'Nmi', @@ -1147,6 +1169,30 @@ class RedfishUtils(object): response = self.post_request(self.root_uri + action_uri, payload) if response['ret'] is False: return response + + # If requested to wait for the service to be available again, block + # until it's ready + if wait: + elapsed_time = 0 + start_time = time.time() + # Start with a large enough sleep. Some services will process new + # requests while in the middle of shutting down, thus breaking out + # early. + time.sleep(30) + + # Periodically check for the service's availability. + while elapsed_time <= wait_timeout: + status = self.check_service_availability() + if status['available']: + # It's available; we're done + break + time.sleep(5) + elapsed_time = time.time() - start_time + + if elapsed_time > wait_timeout: + # Exhausted the wait timer; error + return {'ret': False, 'changed': True, + 'msg': 'The service did not become available after %d seconds' % wait_timeout} return {'ret': True, 'changed': True} def manager_reset_to_defaults(self, command): diff --git a/ansible_collections/community/general/plugins/modules/ansible_galaxy_install.py b/ansible_collections/community/general/plugins/modules/ansible_galaxy_install.py index d382ed93a..b0f3aeb5d 100644 --- a/ansible_collections/community/general/plugins/modules/ansible_galaxy_install.py +++ b/ansible_collections/community/general/plugins/modules/ansible_galaxy_install.py @@ -32,6 +32,19 @@ attributes: diff_mode: support: none options: + state: + description: + - > + If O(state=present) then the collection or role will be installed. + Note that the collections and roles are not updated with this option. + - > + Currently the O(state=latest) is ignored unless O(type=collection), and it will + ensure the collection is installed and updated to the latest available version. + - Please note that O(force=true) can be used to perform upgrade regardless of O(type). + type: str + choices: [ present, latest ] + default: present + version_added: 9.1.0 type: description: - The type of installation performed by C(ansible-galaxy). @@ -69,7 +82,8 @@ options: default: false force: description: - - Force overwriting an existing role or collection. + - Force overwriting existing roles and/or collections. + - It can be used for upgrading, but the module output will always report C(changed=true). - Using O(force=true) is mandatory when downgrading. 
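Back in redfish_utils.py, the new wait handling in manage_power() reduces to a poll loop over check_service_availability(), which itself fetches the service root with a short 10 second timeout so an unavailable service fails fast. A condensed sketch with the availability check abstracted into a callable; the timings mirror the defaults above:

import time

def wait_until_available(is_available, wait_timeout=120, initial_sleep=30, poll_interval=5):
    # Sleep first: some services keep accepting requests while a reset is
    # still in flight, so an immediate check could succeed prematurely.
    start_time = time.time()
    time.sleep(initial_sleep)
    elapsed_time = time.time() - start_time
    while elapsed_time <= wait_timeout:
        if is_available():
            return True
        time.sleep(poll_interval)
        elapsed_time = time.time() - start_time
    return False

# In the module this is driven by the utility itself, roughly:
#   wait_until_available(lambda: utils.check_service_availability()['available'])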
type: bool default: false @@ -188,6 +202,7 @@ class AnsibleGalaxyInstall(ModuleHelper): output_params = ('type', 'name', 'dest', 'requirements_file', 'force', 'no_deps') module = dict( argument_spec=dict( + state=dict(type='str', choices=['present', 'latest'], default='present'), type=dict(type='str', choices=('collection', 'role', 'both'), required=True), name=dict(type='str'), requirements_file=dict(type='path'), @@ -206,6 +221,7 @@ class AnsibleGalaxyInstall(ModuleHelper): command_args_formats = dict( type=cmd_runner_fmt.as_func(lambda v: [] if v == 'both' else [v]), galaxy_cmd=cmd_runner_fmt.as_list(), + upgrade=cmd_runner_fmt.as_bool("--upgrade"), requirements_file=cmd_runner_fmt.as_opt_val('-r'), dest=cmd_runner_fmt.as_opt_val('-p'), force=cmd_runner_fmt.as_bool("--force"), @@ -244,9 +260,7 @@ class AnsibleGalaxyInstall(ModuleHelper): def __init_module__(self): self.runner, self.ansible_version = self._get_ansible_galaxy_version() if self.ansible_version < (2, 11): - self.module.fail_json( - msg="Support for Ansible 2.9 and ansible-base 2.10 has been removed." - ) + self.module.fail_json(msg="Support for Ansible 2.9 and ansible-base 2.10 has been removed.") self.vars.set("new_collections", {}, change=True) self.vars.set("new_roles", {}, change=True) if self.vars.type != "collection": @@ -299,8 +313,9 @@ class AnsibleGalaxyInstall(ModuleHelper): elif match.group("role"): self.vars.new_roles[match.group("role")] = match.group("rversion") - with self.runner("type galaxy_cmd force no_deps dest requirements_file name", output_process=process) as ctx: - ctx.run(galaxy_cmd="install") + upgrade = (self.vars.type == "collection" and self.vars.state == "latest") + with self.runner("type galaxy_cmd upgrade force no_deps dest requirements_file name", output_process=process) as ctx: + ctx.run(galaxy_cmd="install", upgrade=upgrade) if self.verbosity > 2: self.vars.set("run_info", ctx.run_info) diff --git a/ansible_collections/community/general/plugins/modules/cargo.py b/ansible_collections/community/general/plugins/modules/cargo.py index ba9c05ed7..2fc729da2 100644 --- a/ansible_collections/community/general/plugins/modules/cargo.py +++ b/ansible_collections/community/general/plugins/modules/cargo.py @@ -1,6 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- # Copyright (c) 2021 Radek Sprta <mail@radeksprta.eu> +# Copyright (c) 2024 Colin Nolan <cn580@alumni.york.ac.uk> # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later @@ -65,6 +66,13 @@ options: type: str default: present choices: [ "present", "absent", "latest" ] + directory: + description: + - Path to the source directory to install the Rust package from. + - This is only used when installing packages. 
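For the ansible_galaxy_install change above, O(state=latest) only turns into ansible-galaxy's --upgrade flag when collections are installed; roles keep the plain install behaviour. A condensed sketch of the resulting command line (names are examples; the real module assembles the command through CmdRunner together with force, no_deps, dest and requirements_file):

def galaxy_install_cmd(type_, state, name):
    # Mirrors: upgrade = (self.vars.type == "collection" and self.vars.state == "latest")
    upgrade = (type_ == "collection" and state == "latest")
    cmd = ["ansible-galaxy", type_, "install"]
    if upgrade:
        cmd.append("--upgrade")
    cmd.append(name)
    return cmd

print(galaxy_install_cmd("collection", "latest", "community.general"))
# ['ansible-galaxy', 'collection', 'install', '--upgrade', 'community.general']
print(galaxy_install_cmd("role", "latest", "geerlingguy.docker"))
# ['ansible-galaxy', 'role', 'install', 'geerlingguy.docker']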
+ type: path + required: false + version_added: 9.1.0 requirements: - cargo installed """ @@ -98,8 +106,14 @@ EXAMPLES = r""" community.general.cargo: name: ludusavi state: latest + +- name: Install "ludusavi" Rust package from source directory + community.general.cargo: + name: ludusavi + directory: /path/to/ludusavi/source """ +import json import os import re @@ -115,6 +129,7 @@ class Cargo(object): self.state = kwargs["state"] self.version = kwargs["version"] self.locked = kwargs["locked"] + self.directory = kwargs["directory"] @property def path(self): @@ -143,7 +158,7 @@ class Cargo(object): data, dummy = self._exec(cmd, True, False, False) - package_regex = re.compile(r"^([\w\-]+) v(.+):$") + package_regex = re.compile(r"^([\w\-]+) v(\S+).*:$") installed = {} for line in data.splitlines(): package_info = package_regex.match(line) @@ -163,19 +178,53 @@ class Cargo(object): if self.version: cmd.append("--version") cmd.append(self.version) + if self.directory: + cmd.append("--path") + cmd.append(self.directory) return self._exec(cmd) def is_outdated(self, name): installed_version = self.get_installed().get(name) + latest_version = ( + self.get_latest_published_version(name) + if not self.directory + else self.get_source_directory_version(name) + ) + return installed_version != latest_version + def get_latest_published_version(self, name): cmd = ["search", name, "--limit", "1"] data, dummy = self._exec(cmd, True, False, False) match = re.search(r'"(.+)"', data) - if match: - latest_version = match.group(1) - - return installed_version != latest_version + if not match: + self.module.fail_json( + msg="No published version for package %s found" % name + ) + return match.group(1) + + def get_source_directory_version(self, name): + cmd = [ + "metadata", + "--format-version", + "1", + "--no-deps", + "--manifest-path", + os.path.join(self.directory, "Cargo.toml"), + ] + data, dummy = self._exec(cmd, True, False, False) + manifest = json.loads(data) + + package = next( + (package for package in manifest["packages"] if package["name"] == name), + None, + ) + if not package: + self.module.fail_json( + msg="Package %s not defined in source, found: %s" + % (name, [x["name"] for x in manifest["packages"]]) + ) + return package["version"] def uninstall(self, packages=None): cmd = ["uninstall"] @@ -191,16 +240,21 @@ def main(): state=dict(default="present", choices=["present", "absent", "latest"]), version=dict(default=None, type="str"), locked=dict(default=False, type="bool"), + directory=dict(default=None, type="path"), ) module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True) name = module.params["name"] state = module.params["state"] version = module.params["version"] + directory = module.params["directory"] if not name: module.fail_json(msg="Package name must be specified") + if directory is not None and not os.path.isdir(directory): + module.fail_json(msg="Source directory does not exist") + # Set LANG env since we parse stdout module.run_command_environ_update = dict( LANG="C", LC_ALL="C", LC_MESSAGES="C", LC_CTYPE="C" diff --git a/ansible_collections/community/general/plugins/modules/consul_agent_check.py b/ansible_collections/community/general/plugins/modules/consul_agent_check.py new file mode 100644 index 000000000..373926004 --- /dev/null +++ b/ansible_collections/community/general/plugins/modules/consul_agent_check.py @@ -0,0 +1,254 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) 2024, Michael Ilg +# GNU General Public License v3.0+ (see COPYING or 
https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = ''' +module: consul_agent_check +short_description: Add, modify, and delete checks within a consul cluster +version_added: 9.1.0 +description: + - Allows the addition, modification and deletion of checks in a consul + cluster via the agent. For more details on using and configuring Checks, + see U(https://developer.hashicorp.com/consul/api-docs/agent/check). + - Currently, there is no complete way to retrieve the script, interval or TTL + metadata for a registered check. Without this metadata it is not possible to + tell if the data supplied with ansible represents a change to a check. As a + result this does not attempt to determine changes and will always report a + changed occurred. An API method is planned to supply this metadata so at that + stage change management will be added. +author: + - Michael Ilg (@Ilgmi) +extends_documentation_fragment: + - community.general.consul + - community.general.consul.actiongroup_consul + - community.general.consul.token + - community.general.attributes +attributes: + check_mode: + support: full + details: + - The result is the object as it is defined in the module options and not the object structure of the consul API. + For a better overview of what the object structure looks like, + take a look at U(https://developer.hashicorp.com/consul/api-docs/agent/check#list-checks). + diff_mode: + support: partial + details: + - In check mode the diff will show the object as it is defined in the module options and not the object structure of the consul API. +options: + state: + description: + - Whether the check should be present or absent. + choices: ['present', 'absent'] + default: present + type: str + name: + description: + - Required name for the service check. + type: str + id: + description: + - Specifies a unique ID for this check on the node. This defaults to the O(name) parameter, but it may be necessary to provide + an ID for uniqueness. This value will return in the response as "CheckId". + type: str + interval: + description: + - The interval at which the service check will be run. + This is a number with a V(s) or V(m) suffix to signify the units of seconds or minutes, for example V(15s) or V(1m). + If no suffix is supplied V(s) will be used by default, for example V(10) will be V(10s). + - Required if one of the parameters O(args), O(http), or O(tcp) is specified. + type: str + notes: + description: + - Notes to attach to check when registering it. + type: str + args: + description: + - Specifies command arguments to run to update the status of the check. + - Requires O(interval) to be provided. + - Mutually exclusive with O(ttl), O(tcp) and O(http). + type: list + elements: str + ttl: + description: + - Checks can be registered with a TTL instead of a O(args) and O(interval) + this means that the service will check in with the agent before the + TTL expires. If it doesn't the check will be considered failed. + Required if registering a check and the script an interval are missing + Similar to the interval this is a number with a V(s) or V(m) suffix to + signify the units of seconds or minutes, for example V(15s) or V(1m). + If no suffix is supplied V(s) will be used by default, for example V(10) will be V(10s). + - Mutually exclusive with O(args), O(tcp) and O(http). + type: str + tcp: + description: + - Checks can be registered with a TCP port. 
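The host:port format and the duration strings used by these options are enforced by the validate_check() and validate_duration() helpers added to module_utils/consul.py above; a short sketch of what they do:

import re

def validate_duration(duration):
    # Bare numbers get an implicit seconds suffix ("10" -> "10s").
    if duration and not re.search(r"\d+(?:ns|us|ms|s|m|h)", duration):
        duration = "{0}s".format(duration)
    return duration

def validate_tcp(tcp):
    # Tcp checks must be given as host:port.
    if not re.match(r"(?P<host>.*):(?P<port>(?:[0-9]+))$", tcp):
        raise ValueError("tcp check must be in host:port format")
    return tcp

print(validate_duration("10"))       # 10s
print(validate_duration("1m"))       # 1m
print(validate_tcp("localhost:80"))  # localhost:80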
This means that consul + will check if the connection attempt to that port is successful (that is, the port is currently accepting connections). + The format is V(host:port), for example V(localhost:80). + - Requires O(interval) to be provided. + - Mutually exclusive with O(args), O(ttl) and O(http). + type: str + version_added: '1.3.0' + http: + description: + - Checks can be registered with an HTTP endpoint. This means that consul + will check that the http endpoint returns a successful HTTP status. + - Requires O(interval) to be provided. + - Mutually exclusive with O(args), O(ttl) and O(tcp). + type: str + timeout: + description: + - A custom HTTP check timeout. The consul default is 10 seconds. + Similar to the interval this is a number with a V(s) or V(m) suffix to + signify the units of seconds or minutes, for example V(15s) or V(1m). + If no suffix is supplied V(s) will be used by default, for example V(10) will be V(10s). + type: str + service_id: + description: + - The ID for the service, must be unique per node. If O(state=absent), + defaults to the service name if supplied. + type: str +''' + +EXAMPLES = ''' +- name: Register tcp check for service 'nginx' + community.general.consul_agent_check: + name: nginx_tcp_check + service_id: nginx + interval: 60s + tcp: localhost:80 + notes: "Nginx Check" + +- name: Register http check for service 'nginx' + community.general.consul_agent_check: + name: nginx_http_check + service_id: nginx + interval: 60s + http: http://localhost:80/status + notes: "Nginx Check" + +- name: Remove check for service 'nginx' + community.general.consul_agent_check: + state: absent + id: nginx_http_check + service_id: "{{ nginx_service.ID }}" +''' + +RETURN = """ +check: + description: The check as returned by the consul HTTP API. + returned: always + type: dict + sample: + CheckID: nginx_check + ServiceID: nginx + Interval: 30s + Type: http + Notes: Nginx Check +operation: + description: The operation performed. 
+ returned: changed + type: str + sample: update +""" + +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.community.general.plugins.module_utils.consul import ( + AUTH_ARGUMENTS_SPEC, + OPERATION_CREATE, + OPERATION_UPDATE, + OPERATION_DELETE, + OPERATION_READ, + _ConsulModule, + validate_check, +) + +_ARGUMENT_SPEC = { + "state": dict(default="present", choices=["present", "absent"]), + "name": dict(type='str'), + "id": dict(type='str'), + "interval": dict(type='str'), + "notes": dict(type='str'), + "args": dict(type='list', elements='str'), + "http": dict(type='str'), + "tcp": dict(type='str'), + "ttl": dict(type='str'), + "timeout": dict(type='str'), + "service_id": dict(type='str'), +} + +_MUTUALLY_EXCLUSIVE = [ + ('args', 'ttl', 'tcp', 'http'), +] + +_REQUIRED_IF = [ + ('state', 'present', ['name']), + ('state', 'absent', ('id', 'name'), True), +] + +_REQUIRED_BY = { + 'args': 'interval', + 'http': 'interval', + 'tcp': 'interval', +} + +_ARGUMENT_SPEC.update(AUTH_ARGUMENTS_SPEC) + + +class ConsulAgentCheckModule(_ConsulModule): + api_endpoint = "agent/check" + result_key = "check" + unique_identifiers = ["id", "name"] + operational_attributes = {"Node", "CheckID", "Output", "ServiceName", "ServiceTags", + "Status", "Type", "ExposedPort", "Definition"} + + def endpoint_url(self, operation, identifier=None): + if operation == OPERATION_READ: + return "agent/checks" + if operation in [OPERATION_CREATE, OPERATION_UPDATE]: + return "/".join([self.api_endpoint, "register"]) + if operation == OPERATION_DELETE: + return "/".join([self.api_endpoint, "deregister", identifier]) + + return super(ConsulAgentCheckModule, self).endpoint_url(operation, identifier) + + def read_object(self): + url = self.endpoint_url(OPERATION_READ) + checks = self.get(url) + identifier = self.id_from_obj(self.params) + if identifier in checks: + return checks[identifier] + return None + + def prepare_object(self, existing, obj): + existing = super(ConsulAgentCheckModule, self).prepare_object(existing, obj) + validate_check(existing) + return existing + + def delete_object(self, obj): + if not self._module.check_mode: + self.put(self.endpoint_url(OPERATION_DELETE, obj.get("CheckID"))) + return {} + + +def main(): + module = AnsibleModule( + _ARGUMENT_SPEC, + mutually_exclusive=_MUTUALLY_EXCLUSIVE, + required_if=_REQUIRED_IF, + required_by=_REQUIRED_BY, + supports_check_mode=True, + ) + + consul_module = ConsulAgentCheckModule(module) + consul_module.execute() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/community/general/plugins/modules/consul_agent_service.py b/ansible_collections/community/general/plugins/modules/consul_agent_service.py new file mode 100644 index 000000000..a8ef09897 --- /dev/null +++ b/ansible_collections/community/general/plugins/modules/consul_agent_service.py @@ -0,0 +1,289 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) 2024, Michael Ilg +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = ''' +module: consul_agent_service +short_description: Add, modify and delete services within a consul cluster +version_added: 9.1.0 +description: + - Allows the addition, modification and deletion of services in a consul + cluster via the agent. + - There are currently no plans to create services and checks in one. 
+ This is because the Consul API does not provide checks for a service and + the checks themselves do not match the module parameters. + Therefore, only a service without checks can be created in this module. +author: + - Michael Ilg (@Ilgmi) +extends_documentation_fragment: + - community.general.consul + - community.general.consul.actiongroup_consul + - community.general.consul.token + - community.general.attributes +attributes: + check_mode: + support: full + diff_mode: + support: partial + details: + - In check mode the diff will miss operational attributes. +options: + state: + description: + - Whether the service should be present or absent. + choices: ['present', 'absent'] + default: present + type: str + name: + description: + - Unique name for the service on a node, must be unique per node, + required if registering a service. + type: str + id: + description: + - Specifies a unique ID for this service. This must be unique per agent. This defaults to the O(name) parameter if not provided. + If O(state=absent), defaults to the service name if supplied. + type: str + tags: + description: + - Tags that will be attached to the service registration. + type: list + elements: str + address: + description: + - The address to advertise that the service will be listening on. + This value will be passed as the C(address) parameter to Consul's + C(/v1/agent/service/register) API method, so refer to the Consul API + documentation for further details. + type: str + meta: + description: + - Optional meta data used for filtering. + For keys, the characters C(A-Z), C(a-z), C(0-9), C(_), C(-) are allowed. + Not allowed characters are replaced with underscores. + type: dict + service_port: + description: + - The port on which the service is listening. Can optionally be supplied for + registration of a service, that is if O(name) or O(id) is set. + type: int + enable_tag_override: + description: + - Specifies to disable the anti-entropy feature for this service's tags. + If EnableTagOverride is set to true then external agents can update this service in the catalog and modify the tags. + type: bool + default: False + weights: + description: + - Specifies weights for the service + type: dict + suboptions: + passing: + description: + - Weights for passing. + type: int + default: 1 + warning: + description: + - Weights for warning. 
+ type: int + default: 1 + default: {"passing": 1, "warning": 1} +''' + +EXAMPLES = ''' +- name: Register nginx service with the local consul agent + community.general.consul_agent_service: + host: consul1.example.com + token: some_management_acl + name: nginx + service_port: 80 + +- name: Register nginx with a tcp check + community.general.consul_agent_service: + host: consul1.example.com + token: some_management_acl + name: nginx + service_port: 80 + +- name: Register nginx with an http check + community.general.consul_agent_service: + host: consul1.example.com + token: some_management_acl + name: nginx + service_port: 80 + +- name: Register external service nginx available at 10.1.5.23 + community.general.consul_agent_service: + host: consul1.example.com + token: some_management_acl + name: nginx + service_port: 80 + address: 10.1.5.23 + +- name: Register nginx with some service tags + community.general.consul_agent_service: + host: consul1.example.com + token: some_management_acl + name: nginx + service_port: 80 + tags: + - prod + - webservers + +- name: Register nginx with some service meta + community.general.consul_agent_service: + host: consul1.example.com + token: some_management_acl + name: nginx + service_port: 80 + meta: + nginx_version: 1.25.3 + +- name: Remove nginx service + community.general.consul_agent_service: + host: consul1.example.com + token: some_management_acl + service_id: nginx + state: absent + +- name: Register celery worker service + community.general.consul_agent_service: + host: consul1.example.com + token: some_management_acl + name: celery-worker + tags: + - prod + - worker +''' + +RETURN = """ +service: + description: The service as returned by the consul HTTP API. + returned: always + type: dict + sample: + ID: nginx + Service: nginx + Address: localhost + Port: 80 + Tags: + - http + Meta: + - nginx_version: 1.23.3 + Datacenter: dc1 + Weights: + Passing: 1 + Warning: 1 + ContentHash: 61a245cd985261ac + EnableTagOverride: false +operation: + description: The operation performed. 
+ returned: changed + type: str + sample: update +""" + +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.community.general.plugins.module_utils.consul import ( + AUTH_ARGUMENTS_SPEC, + OPERATION_CREATE, + OPERATION_UPDATE, + OPERATION_DELETE, + _ConsulModule +) + +_CHECK_MUTUALLY_EXCLUSIVE = [('args', 'ttl', 'tcp', 'http')] +_CHECK_REQUIRED_BY = { + 'args': 'interval', + 'http': 'interval', + 'tcp': 'interval', +} + +_ARGUMENT_SPEC = { + "state": dict(default="present", choices=["present", "absent"]), + "name": dict(type='str'), + "id": dict(type='str'), + "tags": dict(type='list', elements='str'), + "address": dict(type='str'), + "meta": dict(type='dict'), + "service_port": dict(type='int'), + "enable_tag_override": dict(type='bool', default=False), + "weights": dict(type='dict', options=dict( + passing=dict(type='int', default=1, no_log=False), + warning=dict(type='int', default=1) + ), default={"passing": 1, "warning": 1}) +} + +_REQUIRED_IF = [ + ('state', 'present', ['name']), + ('state', 'absent', ('id', 'name'), True), +] + +_ARGUMENT_SPEC.update(AUTH_ARGUMENTS_SPEC) + + +class ConsulAgentServiceModule(_ConsulModule): + api_endpoint = "agent/service" + result_key = "service" + unique_identifiers = ["id", "name"] + operational_attributes = {"Service", "ContentHash", "Datacenter"} + + def endpoint_url(self, operation, identifier=None): + if operation in [OPERATION_CREATE, OPERATION_UPDATE]: + return "/".join([self.api_endpoint, "register"]) + if operation == OPERATION_DELETE: + return "/".join([self.api_endpoint, "deregister", identifier]) + + return super(ConsulAgentServiceModule, self).endpoint_url(operation, identifier) + + def prepare_object(self, existing, obj): + existing = super(ConsulAgentServiceModule, self).prepare_object(existing, obj) + if "ServicePort" in existing: + existing["Port"] = existing.pop("ServicePort") + + if "ID" not in existing: + existing["ID"] = existing["Name"] + + return existing + + def needs_update(self, api_obj, module_obj): + obj = {} + if "Service" in api_obj: + obj["Service"] = api_obj["Service"] + api_obj = self.prepare_object(api_obj, obj) + + if "Name" in module_obj: + module_obj["Service"] = module_obj.pop("Name") + if "ServicePort" in module_obj: + module_obj["Port"] = module_obj.pop("ServicePort") + + return super(ConsulAgentServiceModule, self).needs_update(api_obj, module_obj) + + def delete_object(self, obj): + if not self._module.check_mode: + url = self.endpoint_url(OPERATION_DELETE, self.id_from_obj(obj, camel_case=True)) + self.put(url) + return {} + + +def main(): + module = AnsibleModule( + _ARGUMENT_SPEC, + required_if=_REQUIRED_IF, + supports_check_mode=True, + ) + + consul_module = ConsulAgentServiceModule(module) + consul_module.execute() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/community/general/plugins/modules/consul_auth_method.py b/ansible_collections/community/general/plugins/modules/consul_auth_method.py index afe549f6e..e28474c31 100644 --- a/ansible_collections/community/general/plugins/modules/consul_auth_method.py +++ b/ansible_collections/community/general/plugins/modules/consul_auth_method.py @@ -168,7 +168,7 @@ def normalize_ttl(ttl): class ConsulAuthMethodModule(_ConsulModule): api_endpoint = "acl/auth-method" result_key = "auth_method" - unique_identifier = "name" + unique_identifiers = ["name"] def map_param(self, k, v, is_update): if k == "config" and v: diff --git a/ansible_collections/community/general/plugins/modules/consul_binding_rule.py 
b/ansible_collections/community/general/plugins/modules/consul_binding_rule.py index 88496f867..6a2882cee 100644 --- a/ansible_collections/community/general/plugins/modules/consul_binding_rule.py +++ b/ansible_collections/community/general/plugins/modules/consul_binding_rule.py @@ -124,7 +124,7 @@ from ansible_collections.community.general.plugins.module_utils.consul import ( class ConsulBindingRuleModule(_ConsulModule): api_endpoint = "acl/binding-rule" result_key = "binding_rule" - unique_identifier = "id" + unique_identifiers = ["id"] def read_object(self): url = "acl/binding-rules?authmethod={0}".format(self.params["auth_method"]) diff --git a/ansible_collections/community/general/plugins/modules/consul_policy.py b/ansible_collections/community/general/plugins/modules/consul_policy.py index 2ed6021b0..36139ac09 100644 --- a/ansible_collections/community/general/plugins/modules/consul_policy.py +++ b/ansible_collections/community/general/plugins/modules/consul_policy.py @@ -145,7 +145,7 @@ _ARGUMENT_SPEC.update(AUTH_ARGUMENTS_SPEC) class ConsulPolicyModule(_ConsulModule): api_endpoint = "acl/policy" result_key = "policy" - unique_identifier = "id" + unique_identifiers = ["id"] def endpoint_url(self, operation, identifier=None): if operation == OPERATION_READ: diff --git a/ansible_collections/community/general/plugins/modules/consul_role.py b/ansible_collections/community/general/plugins/modules/consul_role.py index e07e2036f..d6c4e4dd9 100644 --- a/ansible_collections/community/general/plugins/modules/consul_role.py +++ b/ansible_collections/community/general/plugins/modules/consul_role.py @@ -212,7 +212,7 @@ from ansible_collections.community.general.plugins.module_utils.consul import ( class ConsulRoleModule(_ConsulModule): api_endpoint = "acl/role" result_key = "role" - unique_identifier = "id" + unique_identifiers = ["id"] def endpoint_url(self, operation, identifier=None): if operation == OPERATION_READ: diff --git a/ansible_collections/community/general/plugins/modules/consul_token.py b/ansible_collections/community/general/plugins/modules/consul_token.py index 02bc544da..c8bc8bc27 100644 --- a/ansible_collections/community/general/plugins/modules/consul_token.py +++ b/ansible_collections/community/general/plugins/modules/consul_token.py @@ -235,13 +235,13 @@ def normalize_link_obj(api_obj, module_obj, key): class ConsulTokenModule(_ConsulModule): api_endpoint = "acl/token" result_key = "token" - unique_identifier = "accessor_id" + unique_identifiers = ["accessor_id"] create_only_fields = {"expiration_ttl"} def read_object(self): # if `accessor_id` is not supplied we can only create objects and are not idempotent - if not self.params.get(self.unique_identifier): + if not self.id_from_obj(self.params): return None return super(ConsulTokenModule, self).read_object() diff --git a/ansible_collections/community/general/plugins/modules/django_check.py b/ansible_collections/community/general/plugins/modules/django_check.py new file mode 100644 index 000000000..1553da7a3 --- /dev/null +++ b/ansible_collections/community/general/plugins/modules/django_check.py @@ -0,0 +1,113 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# Copyright (c) 2024, Alexei Znamensky <russoz@gmail.com> +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +DOCUMENTATION = """ +module: django_check +author: + - Alexei 
Znamensky (@russoz) +short_description: Wrapper for C(django-admin check) +version_added: 9.1.0 +description: + - This module is a wrapper for the execution of C(django-admin check). +extends_documentation_fragment: + - community.general.attributes + - community.general.django +options: + database: + description: + - Specify databases to run checks against. + - If not specified, Django will not run database tests. + type: list + elements: str + deploy: + description: + - Include additional checks relevant in a deployment setting. + type: bool + default: false + fail_level: + description: + - Message level that will trigger failure. + - Default is the Django default value. Check the documentation for the version being used. + type: str + choices: [CRITICAL, ERROR, WARNING, INFO, DEBUG] + tags: + description: + - Restrict checks to specific tags. + type: list + elements: str + apps: + description: + - Restrict checks to specific applications. + - Default is to check all applications. + type: list + elements: str +notes: + - The outcome of the module is found in the common return values RV(ignore:stdout), RV(ignore:stderr), RV(ignore:rc). + - The module will fail if RV(ignore:rc) is not zero. +attributes: + check_mode: + support: full + diff_mode: + support: none +""" + +EXAMPLES = """ +- name: Check the entire project + community.general.django_check: + settings: myproject.settings + +- name: Create the project using specific databases + community.general.django_check: + database: + - somedb + - myotherdb + settings: fancysite.settings + pythonpath: /home/joedoe/project/fancysite + venv: /home/joedoe/project/fancysite/venv +""" + +RETURN = """ +run_info: + description: Command-line execution information. + type: dict + returned: success and C(verbosity) >= 3 +""" + +from ansible_collections.community.general.plugins.module_utils.django import DjangoModuleHelper +from ansible_collections.community.general.plugins.module_utils.cmd_runner import cmd_runner_fmt + + +class DjangoCheck(DjangoModuleHelper): + module = dict( + argument_spec=dict( + database=dict(type="list", elements="str"), + deploy=dict(type="bool", default=False), + fail_level=dict(type="str", choices=["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"]), + tags=dict(type="list", elements="str"), + apps=dict(type="list", elements="str"), + ), + supports_check_mode=True, + ) + arg_formats = dict( + database=cmd_runner_fmt.stack(cmd_runner_fmt.as_opt_val)("--database"), + deploy=cmd_runner_fmt.as_bool("--deploy"), + fail_level=cmd_runner_fmt.as_opt_val("--fail-level"), + tags=cmd_runner_fmt.stack(cmd_runner_fmt.as_opt_val)("--tag"), + apps=cmd_runner_fmt.as_list(), + ) + django_admin_cmd = "check" + django_admin_arg_order = "database deploy fail_level tags apps" + + +def main(): + DjangoCheck.execute() + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/general/plugins/modules/django_createcachetable.py b/ansible_collections/community/general/plugins/modules/django_createcachetable.py new file mode 100644 index 000000000..b038e0358 --- /dev/null +++ b/ansible_collections/community/general/plugins/modules/django_createcachetable.py @@ -0,0 +1,67 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# Copyright (c) 2024, Alexei Znamensky <russoz@gmail.com> +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + 
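The arg_formats declared for django_check above rely on the cmd_runner_fmt helpers from this collection's module_utils to turn module options into django-admin command-line arguments. The snippet below is a rough, self-contained sketch (simplified stand-ins written for illustration, not the collection's actual cmd_runner implementation) of how stack(as_opt_val) is expected to expand a list option such as O(database) into repeated flags:

    # Simplified stand-ins for cmd_runner_fmt.as_opt_val and cmd_runner_fmt.stack,
    # shown only to illustrate the expected expansion of list-valued options.
    def as_opt_val(flag):
        return lambda value: [flag, str(value)]

    def stack(fmt):
        def wrapper(flag):
            per_item = fmt(flag)
            return lambda values: [arg for value in values for arg in per_item(value)]
        return wrapper

    database_fmt = stack(as_opt_val)("--database")
    print(database_fmt(["somedb", "myotherdb"]))
    # expected: ['--database', 'somedb', '--database', 'myotherdb']

With O(deploy=true) and O(fail_level=WARNING) added, the generated arguments would look roughly like C(check --database somedb --database myotherdb --deploy --fail-level WARNING), following the django_admin_arg_order defined above.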
+DOCUMENTATION = """ +module: django_createcachetable +author: + - Alexei Znamensky (@russoz) +short_description: Wrapper for C(django-admin createcachetable) +version_added: 9.1.0 +description: + - This module is a wrapper for the execution of C(django-admin createcachetable). +extends_documentation_fragment: + - community.general.attributes + - community.general.django + - community.general.django.database +attributes: + check_mode: + support: full + diff_mode: + support: none +""" + +EXAMPLES = """ +- name: Create cache table in the default database + community.general.django_createcachetable: + settings: myproject.settings + +- name: Create cache table in the other database + community.general.django_createcachetable: + database: myotherdb + settings: fancysite.settings + pythonpath: /home/joedoe/project/fancysite + venv: /home/joedoe/project/fancysite/venv +""" + +RETURN = """ +run_info: + description: Command-line execution information. + type: dict + returned: success and O(verbosity) >= 3 +""" + +from ansible_collections.community.general.plugins.module_utils.django import DjangoModuleHelper + + +class DjangoCreateCacheTable(DjangoModuleHelper): + module = dict( + supports_check_mode=True, + ) + django_admin_cmd = "createcachetable" + django_admin_arg_order = "noinput database dry_run" + _django_args = ["noinput", "database", "dry_run"] + _check_mode_arg = "dry_run" + + +def main(): + DjangoCreateCacheTable.execute() + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/community/general/plugins/modules/git_config.py b/ansible_collections/community/general/plugins/modules/git_config.py index a8d2ebe97..95969c1b3 100644 --- a/ansible_collections/community/general/plugins/modules/git_config.py +++ b/ansible_collections/community/general/plugins/modules/git_config.py @@ -18,7 +18,7 @@ author: - Matthew Gamble (@djmattyg007) - Marius Gedminas (@mgedmin) requirements: ['git'] -short_description: Read and write git configuration +short_description: Update git configuration description: - The M(community.general.git_config) module changes git configuration by invoking C(git config). This is needed if you do not want to use M(ansible.builtin.template) for the entire git @@ -36,6 +36,8 @@ options: list_all: description: - List all settings (optionally limited to a given O(scope)). + - This option is B(deprecated) and will be removed from community.general 11.0.0. + Please use M(community.general.git_config_info) instead. type: bool default: false name: @@ -74,6 +76,8 @@ options: description: - When specifying the name of a single setting, supply a value to set that setting to the given value. + - From community.general 11.0.0 on, O(value) will be required if O(state=present). + To read values, use the M(community.general.git_config_info) module instead. 
type: str add_mode: description: @@ -143,29 +147,6 @@ EXAMPLES = ''' repo: /etc scope: local value: 'root@{{ ansible_fqdn }}' - -- name: Read individual values from git config - community.general.git_config: - name: alias.ci - scope: global - -- name: Scope system is also assumed when reading values, unless list_all=true - community.general.git_config: - name: alias.diffc - -- name: Read all values from git config - community.general.git_config: - list_all: true - scope: global - -- name: When list_all is yes and no scope is specified, you get configuration from all scopes - community.general.git_config: - list_all: true - -- name: Specify a repository to include local settings - community.general.git_config: - list_all: true - repo: /path/to/repo.git ''' RETURN = ''' @@ -193,7 +174,7 @@ from ansible.module_utils.basic import AnsibleModule def main(): module = AnsibleModule( argument_spec=dict( - list_all=dict(required=False, type='bool', default=False), + list_all=dict(required=False, type='bool', default=False, removed_in_version='11.0.0', removed_from_collection='community.general'), name=dict(type='str'), repo=dict(type='path'), file=dict(type='path'), @@ -222,6 +203,14 @@ def main(): new_value = params['value'] or '' add_mode = params['add_mode'] + if not unset and not new_value and not params['list_all']: + module.deprecate( + 'If state=present, a value must be specified from community.general 11.0.0 on.' + ' To read a config value, use the community.general.git_config_info module instead.', + version='11.0.0', + collection_name='community.general', + ) + scope = determine_scope(params) cwd = determine_cwd(scope, params) @@ -263,7 +252,7 @@ def main(): module.exit_json(changed=False, msg='', config_value=old_values[0] if old_values else '') elif unset and not out: module.exit_json(changed=False, msg='no setting to unset') - elif new_value in old_values and (len(old_values) == 1 or add_mode == "add"): + elif new_value in old_values and (len(old_values) == 1 or add_mode == "add") and not unset: module.exit_json(changed=False, msg="") # Until this point, the git config was just read and in case no change is needed, the module has already exited. diff --git a/ansible_collections/community/general/plugins/modules/homectl.py b/ansible_collections/community/general/plugins/modules/homectl.py index ca4c19a87..7751651c8 100644 --- a/ansible_collections/community/general/plugins/modules/homectl.py +++ b/ansible_collections/community/general/plugins/modules/homectl.py @@ -17,6 +17,12 @@ short_description: Manage user accounts with systemd-homed version_added: 4.4.0 description: - Manages a user's home directory managed by systemd-homed. +notes: + - This module does B(not) work with Python 3.13 or newer. It uses the deprecated L(crypt Python module, + https://docs.python.org/3.12/library/crypt.html) from the Python standard library, which was removed + from Python 3.13. 
+requirements: + - Python 3.12 or earlier extends_documentation_fragment: - community.general.attributes attributes: @@ -263,12 +269,21 @@ data: } ''' -import crypt import json -from ansible.module_utils.basic import AnsibleModule +import traceback +from ansible.module_utils.basic import AnsibleModule, missing_required_lib from ansible.module_utils.basic import jsonify from ansible.module_utils.common.text.formatters import human_to_bytes +try: + import crypt +except ImportError: + HAS_CRYPT = False + CRYPT_IMPORT_ERROR = traceback.format_exc() +else: + HAS_CRYPT = True + CRYPT_IMPORT_ERROR = None + class Homectl(object): '''#TODO DOC STRINGS''' @@ -591,6 +606,12 @@ def main(): ] ) + if not HAS_CRYPT: + module.fail_json( + msg=missing_required_lib('crypt (part of Python 3.13 standard library)'), + exception=CRYPT_IMPORT_ERROR, + ) + homectl = Homectl(module) homectl.result['state'] = homectl.state diff --git a/ansible_collections/community/general/plugins/modules/ipa_dnsrecord.py b/ansible_collections/community/general/plugins/modules/ipa_dnsrecord.py index cb4ce03dd..59475a55b 100644 --- a/ansible_collections/community/general/plugins/modules/ipa_dnsrecord.py +++ b/ansible_collections/community/general/plugins/modules/ipa_dnsrecord.py @@ -35,13 +35,14 @@ options: record_type: description: - The type of DNS record name. - - Currently, 'A', 'AAAA', 'A6', 'CNAME', 'DNAME', 'NS', 'PTR', 'TXT', 'SRV' and 'MX' are supported. + - Currently, 'A', 'AAAA', 'A6', 'CNAME', 'DNAME', 'NS', 'PTR', 'TXT', 'SRV', 'MX' and 'SSHFP' are supported. - "'A6', 'CNAME', 'DNAME' and 'TXT' are added in version 2.5." - "'SRV' and 'MX' are added in version 2.8." - "'NS' are added in comunity.general 8.2.0." + - "'SSHFP' are added in community.general 9.1.0." required: false default: 'A' - choices: ['A', 'AAAA', 'A6', 'CNAME', 'DNAME', 'MX', 'NS', 'PTR', 'SRV', 'TXT'] + choices: ['A', 'AAAA', 'A6', 'CNAME', 'DNAME', 'MX', 'NS', 'PTR', 'SRV', 'TXT', 'SSHFP'] type: str record_value: description: @@ -57,6 +58,7 @@ options: - In the case of 'TXT' record type, this will be a text. - In the case of 'SRV' record type, this will be a service record. - In the case of 'MX' record type, this will be a mail exchanger record. + - In the case of 'SSHFP' record type, this will be an SSH fingerprint record. type: str record_values: description: @@ -71,6 +73,7 @@ options: - In the case of 'TXT' record type, this will be a text. - In the case of 'SRV' record type, this will be a service record. - In the case of 'MX' record type, this will be a mail exchanger record. + - In the case of 'SSHFP' record type, this will be an SSH fingerprint record. type: list elements: str record_ttl: @@ -175,6 +178,20 @@ EXAMPLES = r''' ipa_host: ipa.example.com ipa_user: admin ipa_pass: ChangeMe! + +- name: Retrieve the current sshfp fingerprints + ansible.builtin.command: ssh-keyscan -D localhost + register: ssh_hostkeys + +- name: Update the SSHFP records in DNS + community.general.ipa_dnsrecord: + name: "{{ inventory_hostname}}" + zone_name: example.com + record_type: 'SSHFP' + record_values: "{{ ssh_hostkeys.stdout.split('\n') | map('split', 'SSHFP ') | map('last') | list }}" + ipa_host: ipa.example.com + ipa_user: admin + ipa_pass: ChangeMe! 
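The record_values expression in the SSHFP example above reshapes the raw ssh-keyscan output line by line. A rough Python equivalent of that Jinja pipeline, assuming C(ssh-keyscan -D) writes lines of the form C(<host> SSHFP <algorithm> <fp_type> <fingerprint>) to stdout:

    # Hypothetical sample of `ssh-keyscan -D localhost` output; real fingerprints will differ.
    stdout = "localhost SSHFP 1 1 aa11bb22\nlocalhost SSHFP 4 2 cc33dd44"

    # split('\n') | map('split', 'SSHFP ') | map('last') | list
    record_values = [line.split("SSHFP ")[-1] for line in stdout.split("\n")]
    print(record_values)  # ['1 1 aa11bb22', '4 2 cc33dd44']

Each resulting item ("<algorithm> <fp_type> <fingerprint>") is then passed as a single sshfprecord value.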
''' RETURN = r''' @@ -228,6 +245,8 @@ class DNSRecordIPAClient(IPAClient): item.update(srvrecord=value) elif details['record_type'] == 'MX': item.update(mxrecord=value) + elif details['record_type'] == 'SSHFP': + item.update(sshfprecord=value) self._post_json(method='dnsrecord_add', name=zone_name, item=item) @@ -266,6 +285,8 @@ def get_dnsrecord_dict(details=None): module_dnsrecord.update(srvrecord=details['record_values']) elif details['record_type'] == 'MX' and details['record_values']: module_dnsrecord.update(mxrecord=details['record_values']) + elif details['record_type'] == 'SSHFP' and details['record_values']: + module_dnsrecord.update(sshfprecord=details['record_values']) if details.get('record_ttl'): module_dnsrecord.update(dnsttl=details['record_ttl']) @@ -328,7 +349,7 @@ def ensure(module, client): def main(): - record_types = ['A', 'AAAA', 'A6', 'CNAME', 'DNAME', 'NS', 'PTR', 'TXT', 'SRV', 'MX'] + record_types = ['A', 'AAAA', 'A6', 'CNAME', 'DNAME', 'NS', 'PTR', 'TXT', 'SRV', 'MX', 'SSHFP'] argument_spec = ipa_argument_spec() argument_spec.update( zone_name=dict(type='str', required=True), diff --git a/ansible_collections/community/general/plugins/modules/keycloak_client.py b/ansible_collections/community/general/plugins/modules/keycloak_client.py index 3628e5a51..efaa66e26 100644 --- a/ansible_collections/community/general/plugins/modules/keycloak_client.py +++ b/ansible_collections/community/general/plugins/modules/keycloak_client.py @@ -340,6 +340,42 @@ options: description: - Override realm authentication flow bindings. type: dict + suboptions: + browser: + description: + - Flow ID of the browser authentication flow. + - O(authentication_flow_binding_overrides.browser) + and O(authentication_flow_binding_overrides.browser_name) are mutually exclusive. + type: str + + browser_name: + description: + - Flow name of the browser authentication flow. + - O(authentication_flow_binding_overrides.browser) + and O(authentication_flow_binding_overrides.browser_name) are mutually exclusive. + aliases: + - browserName + type: str + version_added: 9.1.0 + + direct_grant: + description: + - Flow ID of the direct grant authentication flow. + - O(authentication_flow_binding_overrides.direct_grant) + and O(authentication_flow_binding_overrides.direct_grant_name) are mutually exclusive. + aliases: + - directGrant + type: str + + direct_grant_name: + description: + - Flow name of the direct grant authentication flow. + - O(authentication_flow_binding_overrides.direct_grant) + and O(authentication_flow_binding_overrides.direct_grant_name) are mutually exclusive. + aliases: + - directGrantName + type: str + version_added: 9.1.0 aliases: - authenticationFlowBindingOverrides version_added: 3.4.0 @@ -781,6 +817,64 @@ def sanitize_cr(clientrep): return normalise_cr(result) +def get_authentication_flow_id(flow_name, realm, kc): + """ Get the authentication flow ID based on the flow name, realm, and Keycloak client. + + Args: + flow_name (str): The name of the authentication flow. + realm (str): The name of the realm. + kc (KeycloakClient): The Keycloak client instance. + + Returns: + str: The ID of the authentication flow. + + Raises: + KeycloakAPIException: If the authentication flow with the given name is not found in the realm. 
+ """ + flow = kc.get_authentication_flow_by_alias(flow_name, realm) + if flow: + return flow["id"] + kc.module.fail_json(msg='Authentification flow %s not found in realm %s' % (flow_name, realm)) + + +def flow_binding_from_dict_to_model(newClientFlowBinding, realm, kc): + """ Convert a dictionary representing client flow bindings to a model representation. + + Args: + newClientFlowBinding (dict): A dictionary containing client flow bindings. + realm (str): The name of the realm. + kc (KeycloakClient): An instance of the KeycloakClient class. + + Returns: + dict: A dictionary representing the model flow bindings. The dictionary has two keys: + - "browser" (str or None): The ID of the browser authentication flow binding, or None if not provided. + - "direct_grant" (str or None): The ID of the direct grant authentication flow binding, or None if not provided. + + Raises: + KeycloakAPIException: If the authentication flow with the given name is not found in the realm. + + """ + + modelFlow = { + "browser": None, + "direct_grant": None + } + + for k, v in newClientFlowBinding.items(): + if not v: + continue + if k == "browser": + modelFlow["browser"] = v + elif k == "browser_name": + modelFlow["browser"] = get_authentication_flow_id(v, realm, kc) + elif k == "direct_grant": + modelFlow["direct_grant"] = v + elif k == "direct_grant_name": + modelFlow["direct_grant"] = get_authentication_flow_id(v, realm, kc) + + return modelFlow + + def main(): """ Module execution @@ -799,6 +893,13 @@ def main(): config=dict(type='dict'), ) + authentication_flow_spec = dict( + browser=dict(type='str'), + browser_name=dict(type='str', aliases=['browserName']), + direct_grant=dict(type='str', aliases=['directGrant']), + direct_grant_name=dict(type='str', aliases=['directGrantName']), + ) + meta_args = dict( state=dict(default='present', choices=['present', 'absent']), realm=dict(type='str', default='master'), @@ -838,7 +939,13 @@ def main(): use_template_scope=dict(type='bool', aliases=['useTemplateScope']), use_template_mappers=dict(type='bool', aliases=['useTemplateMappers']), always_display_in_console=dict(type='bool', aliases=['alwaysDisplayInConsole']), - authentication_flow_binding_overrides=dict(type='dict', aliases=['authenticationFlowBindingOverrides']), + authentication_flow_binding_overrides=dict( + type='dict', + aliases=['authenticationFlowBindingOverrides'], + options=authentication_flow_spec, + required_one_of=[['browser', 'direct_grant', 'browser_name', 'direct_grant_name']], + mutually_exclusive=[['browser', 'browser_name'], ['direct_grant', 'direct_grant_name']], + ), protocol_mappers=dict(type='list', elements='dict', options=protmapper_spec, aliases=['protocolMappers']), authorization_settings=dict(type='dict', aliases=['authorizationSettings']), default_client_scopes=dict(type='list', elements='str', aliases=['defaultClientScopes']), @@ -900,6 +1007,8 @@ def main(): # they are not specified if client_param == 'protocol_mappers': new_param_value = [dict((k, v) for k, v in x.items() if x[k] is not None) for x in new_param_value] + elif client_param == 'authentication_flow_binding_overrides': + new_param_value = flow_binding_from_dict_to_model(new_param_value, realm, kc) changeset[camel(client_param)] = new_param_value diff --git a/ansible_collections/community/general/plugins/modules/keycloak_clientscope.py b/ansible_collections/community/general/plugins/modules/keycloak_clientscope.py index d24e0f1f2..b962b932c 100644 --- 
a/ansible_collections/community/general/plugins/modules/keycloak_clientscope.py +++ b/ansible_collections/community/general/plugins/modules/keycloak_clientscope.py @@ -301,10 +301,37 @@ end_state: ''' from ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak import KeycloakAPI, camel, \ - keycloak_argument_spec, get_token, KeycloakError + keycloak_argument_spec, get_token, KeycloakError, is_struct_included from ansible.module_utils.basic import AnsibleModule +def normalise_cr(clientscoperep, remove_ids=False): + """ Re-sorts any properties where the order does not matter, so that diffs are minimised, and adds default values where appropriate so that + the change detection is more effective. + + :param clientscoperep: the clientscoperep dict to be normalised + :param remove_ids: If set to true, the unique IDs of objects are removed so that the diff and change checks do + not alert on IDs that are usually not known in advance (e.g. for protocol_mappers) + :return: normalised clientscoperep dict + """ + # Avoid modifying the dict passed in + clientscoperep = clientscoperep.copy() + + if 'attributes' in clientscoperep: + clientscoperep['attributes'] = list(sorted(clientscoperep['attributes'])) + + if 'protocolMappers' in clientscoperep: + clientscoperep['protocolMappers'] = sorted(clientscoperep['protocolMappers'], key=lambda x: (x.get('name'), x.get('protocol'), x.get('protocolMapper'))) + for mapper in clientscoperep['protocolMappers']: + if remove_ids: + mapper.pop('id', None) + + # Set to a default value. + mapper['consentRequired'] = mapper.get('consentRequired', False) + + return clientscoperep + + def sanitize_cr(clientscoperep): """ Removes probably sensitive details from a clientscoperep representation. @@ -317,7 +344,7 @@ def sanitize_cr(clientscoperep): if 'attributes' in result: if 'saml.signing.private.key' in result['attributes']: result['attributes']['saml.signing.private.key'] = 'no_log' - return result + return normalise_cr(result) def main(): @@ -458,6 +485,13 @@ def main(): result['diff'] = dict(before=sanitize_cr(before_clientscope), after=sanitize_cr(desired_clientscope)) if module.check_mode: + # We can only compare the current clientscope with the proposed updates we have + before_norm = normalise_cr(before_clientscope, remove_ids=True) + desired_norm = normalise_cr(desired_clientscope, remove_ids=True) + if module._diff: + result['diff'] = dict(before=sanitize_cr(before_norm), + after=sanitize_cr(desired_norm)) + result['changed'] = not is_struct_included(desired_norm, before_norm) module.exit_json(**result) # do the update diff --git a/ansible_collections/community/general/plugins/modules/launchd.py b/ansible_collections/community/general/plugins/modules/launchd.py index e5942ea7c..a6427bdb2 100644 --- a/ansible_collections/community/general/plugins/modules/launchd.py +++ b/ansible_collections/community/general/plugins/modules/launchd.py @@ -514,7 +514,8 @@ def main(): result['status']['current_pid'] != result['status']['previous_pid']): result['changed'] = True if module.check_mode: - result['changed'] = True + if result['status']['current_state'] != action: + result['changed'] = True module.exit_json(**result) diff --git a/ansible_collections/community/general/plugins/modules/openbsd_pkg.py b/ansible_collections/community/general/plugins/modules/openbsd_pkg.py index c83113611..69ac7bff8 100644 --- a/ansible_collections/community/general/plugins/modules/openbsd_pkg.py +++
b/ansible_collections/community/general/plugins/modules/openbsd_pkg.py @@ -24,7 +24,10 @@ attributes: check_mode: support: full diff_mode: - support: none + support: partial + version_added: 9.1.0 + details: + - Only works when check mode is not enabled. options: name: description: @@ -159,6 +162,20 @@ def execute_command(cmd, module): return module.run_command(cmd_args, environ_update={'TERM': 'dumb'}) +def get_all_installed(module): + """ + Get all installed packaged. Used to support diff mode + """ + command = 'pkg_info -Iq' + + rc, stdout, stderr = execute_command(command, module) + + if stderr: + module.fail_json(msg="failed in get_all_installed(): %s" % stderr) + + return stdout + + # Function used to find out if a package is currently installed. def get_package_state(names, pkg_spec, module): info_cmd = 'pkg_info -Iq' @@ -573,10 +590,13 @@ def main(): result['name'] = name result['state'] = state result['build'] = build + result['diff'] = {} # The data structure used to keep track of package information. pkg_spec = {} + new_package_list = original_package_list = get_all_installed(module) + if build is True: if not os.path.isdir(ports_dir): module.fail_json(msg="the ports source directory %s does not exist" % (ports_dir)) @@ -661,6 +681,10 @@ def main(): result['changed'] = combined_changed + if result['changed'] and not module.check_mode: + new_package_list = get_all_installed(module) + result['diff'] = dict(before=original_package_list, after=new_package_list) + module.exit_json(**result) diff --git a/ansible_collections/community/general/plugins/modules/pacman.py b/ansible_collections/community/general/plugins/modules/pacman.py index 7f67b9103..f13bde317 100644 --- a/ansible_collections/community/general/plugins/modules/pacman.py +++ b/ansible_collections/community/general/plugins/modules/pacman.py @@ -367,8 +367,9 @@ class Pacman(object): self.install_packages(pkgs) self.success() - # This shouldn't happen... - self.fail("This is a bug") + # This happens if an empty list has been provided for name + self.add_exit_infos(msg='Nothing to do') + self.success() def install_packages(self, pkgs): pkgs_to_install = [] diff --git a/ansible_collections/community/general/plugins/modules/proxmox_kvm.py b/ansible_collections/community/general/plugins/modules/proxmox_kvm.py index 9fe805c7a..71cbb51fc 100644 --- a/ansible_collections/community/general/plugins/modules/proxmox_kvm.py +++ b/ansible_collections/community/general/plugins/modules/proxmox_kvm.py @@ -174,6 +174,7 @@ options: - Allow to force stop VM. - Can be used with states V(stopped), V(restarted), and V(absent). - This option has no default unless O(proxmox_default_behavior) is set to V(compatibility); then the default is V(false). + - Requires parameter O(archive). type: bool format: description: diff --git a/ansible_collections/community/general/plugins/modules/proxmox_vm_info.py b/ansible_collections/community/general/plugins/modules/proxmox_vm_info.py index 39d8307a4..e10b9dff6 100644 --- a/ansible_collections/community/general/plugins/modules/proxmox_vm_info.py +++ b/ansible_collections/community/general/plugins/modules/proxmox_vm_info.py @@ -57,6 +57,13 @@ options: - pending default: none version_added: 8.1.0 + network: + description: + - Whether to retrieve the current network status. + - Requires enabled/running qemu-guest-agent on qemu VMs. 
+ type: bool + default: false + version_added: 9.1.0 extends_documentation_fragment: - community.general.proxmox.actiongroup_proxmox - community.general.proxmox.documentation @@ -172,7 +179,7 @@ class ProxmoxVmInfoAnsible(ProxmoxAnsible): msg="Failed to retrieve VMs information from cluster resources: %s" % e ) - def get_vms_from_nodes(self, cluster_machines, type, vmid=None, name=None, node=None, config=None): + def get_vms_from_nodes(self, cluster_machines, type, vmid=None, name=None, node=None, config=None, network=False): # Leave in dict only machines that user wants to know about filtered_vms = { vm: info for vm, info in cluster_machines.items() if not ( @@ -201,17 +208,23 @@ class ProxmoxVmInfoAnsible(ProxmoxAnsible): config_type = 0 if config == "pending" else 1 # GET /nodes/{node}/qemu/{vmid}/config current=[0/1] desired_vm["config"] = call_vm_getter(this_vm_id).config().get(current=config_type) + if network: + if type == "qemu": + desired_vm["network"] = call_vm_getter(this_vm_id).agent("network-get-interfaces").get()['result'] + elif type == "lxc": + desired_vm["network"] = call_vm_getter(this_vm_id).interfaces.get() + return filtered_vms - def get_qemu_vms(self, cluster_machines, vmid=None, name=None, node=None, config=None): + def get_qemu_vms(self, cluster_machines, vmid=None, name=None, node=None, config=None, network=False): try: - return self.get_vms_from_nodes(cluster_machines, "qemu", vmid, name, node, config) + return self.get_vms_from_nodes(cluster_machines, "qemu", vmid, name, node, config, network) except Exception as e: self.module.fail_json(msg="Failed to retrieve QEMU VMs information: %s" % e) - def get_lxc_vms(self, cluster_machines, vmid=None, name=None, node=None, config=None): + def get_lxc_vms(self, cluster_machines, vmid=None, name=None, node=None, config=None, network=False): try: - return self.get_vms_from_nodes(cluster_machines, "lxc", vmid, name, node, config) + return self.get_vms_from_nodes(cluster_machines, "lxc", vmid, name, node, config, network) except Exception as e: self.module.fail_json(msg="Failed to retrieve LXC VMs information: %s" % e) @@ -229,6 +242,7 @@ def main(): type="str", choices=["none", "current", "pending"], default="none", required=False ), + network=dict(type="bool", default=False, required=False), ) module_args.update(vm_info_args) @@ -245,6 +259,7 @@ def main(): vmid = module.params["vmid"] name = module.params["name"] config = module.params["config"] + network = module.params["network"] result = dict(changed=False) @@ -256,12 +271,12 @@ def main(): vms = {} if type == "lxc": - vms = proxmox.get_lxc_vms(cluster_machines, vmid, name, node, config) + vms = proxmox.get_lxc_vms(cluster_machines, vmid, name, node, config, network) elif type == "qemu": - vms = proxmox.get_qemu_vms(cluster_machines, vmid, name, node, config) + vms = proxmox.get_qemu_vms(cluster_machines, vmid, name, node, config, network) else: - vms = proxmox.get_qemu_vms(cluster_machines, vmid, name, node, config) - vms.update(proxmox.get_lxc_vms(cluster_machines, vmid, name, node, config)) + vms = proxmox.get_qemu_vms(cluster_machines, vmid, name, node, config, network) + vms.update(proxmox.get_lxc_vms(cluster_machines, vmid, name, node, config, network)) result["proxmox_vms"] = [info for vm, info in sorted(vms.items())] module.exit_json(**result) diff --git a/ansible_collections/community/general/plugins/modules/redfish_command.py b/ansible_collections/community/general/plugins/modules/redfish_command.py index d351e7c1d..0f7a64b81 100644 --- 
a/ansible_collections/community/general/plugins/modules/redfish_command.py +++ b/ansible_collections/community/general/plugins/modules/redfish_command.py @@ -288,6 +288,20 @@ options: type: str choices: [ ResetAll, PreserveNetworkAndUsers, PreserveNetwork ] version_added: 8.6.0 + wait: + required: false + description: + - Block until the service is ready again. + type: bool + default: false + version_added: 9.1.0 + wait_timeout: + required: false + description: + - How long to block until the service is ready again before giving up. + type: int + default: 120 + version_added: 9.1.0 author: - "Jose Delarosa (@jose-delarosa)" @@ -685,6 +699,16 @@ EXAMPLES = ''' username: "{{ username }}" password: "{{ password }}" + - name: Restart manager power gracefully and wait for it to be available + community.general.redfish_command: + category: Manager + command: GracefulRestart + resource_id: BMC + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + wait: True + - name: Restart manager power gracefully community.general.redfish_command: category: Manager @@ -841,7 +865,9 @@ def main(): ), strip_etag_quotes=dict(type='bool', default=False), reset_to_defaults_mode=dict(choices=['ResetAll', 'PreserveNetworkAndUsers', 'PreserveNetwork']), - bios_attributes=dict(type="dict") + bios_attributes=dict(type="dict"), + wait=dict(type='bool', default=False), + wait_timeout=dict(type='int', default=120), ), required_together=[ ('username', 'password'), @@ -1016,7 +1042,7 @@ def main(): command = 'PowerGracefulRestart' if command.startswith('Power'): - result = rf_utils.manage_manager_power(command) + result = rf_utils.manage_manager_power(command, module.params['wait'], module.params['wait_timeout']) elif command == 'ClearLogs': result = rf_utils.clear_logs() elif command == 'VirtualMediaInsert': diff --git a/ansible_collections/community/general/plugins/modules/redfish_info.py b/ansible_collections/community/general/plugins/modules/redfish_info.py index 3b594b7a2..efcb34f01 100644 --- a/ansible_collections/community/general/plugins/modules/redfish_info.py +++ b/ansible_collections/community/general/plugins/modules/redfish_info.py @@ -359,6 +359,16 @@ EXAMPLES = ''' baseuri: "{{ baseuri }}" username: "{{ username }}" password: "{{ password }}" + + - name: Check the availability of the service with a timeout of 5 seconds + community.general.redfish_info: + category: Service + command: CheckAvailability + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + timeout: 5 + register: result ''' RETURN = ''' @@ -385,6 +395,7 @@ CATEGORY_COMMANDS_ALL = { "GetUpdateStatus"], "Manager": ["GetManagerNicInventory", "GetVirtualMedia", "GetLogs", "GetNetworkProtocols", "GetHealthReport", "GetHostInterfaces", "GetManagerInventory", "GetServiceIdentification"], + "Service": ["CheckAvailability"], } CATEGORY_COMMANDS_DEFAULT = { @@ -393,7 +404,8 @@ CATEGORY_COMMANDS_DEFAULT = { "Accounts": "ListUsers", "Update": "GetFirmwareInventory", "Sessions": "GetSessions", - "Manager": "GetManagerNicInventory" + "Manager": "GetManagerNicInventory", + "Service": "CheckAvailability", } @@ -473,7 +485,13 @@ def main(): module.fail_json(msg="Invalid Category: %s" % category) # Organize by Categories / Commands - if category == "Systems": + if category == "Service": + # service-level commands are always available + for command in command_list: + if command == "CheckAvailability": + result["service"] = rf_utils.check_service_availability() + + elif category == "Systems": # execute 
only if we find a Systems resource resource = rf_utils._find_systems_resource() if resource['ret'] is False: diff --git a/ansible_collections/community/general/plugins/modules/redis_info.py b/ansible_collections/community/general/plugins/modules/redis_info.py index f352d53d7..c75abcf21 100644 --- a/ansible_collections/community/general/plugins/modules/redis_info.py +++ b/ansible_collections/community/general/plugins/modules/redis_info.py @@ -30,6 +30,11 @@ options: version_added: 7.5.0 ca_certs: version_added: 7.5.0 + cluster: + default: false + description: Get informations about cluster status as RV(cluster). + type: bool + version_added: 9.1.0 seealso: - module: community.general.redis author: "Pavlo Bashynskyi (@levonet)" @@ -43,6 +48,15 @@ EXAMPLES = r''' - name: Print server information ansible.builtin.debug: var: result.info + +- name: Get server cluster information + community.general.redis_info: + cluster: true + register: result + +- name: Print server cluster information + ansible.builtin.debug: + var: result.cluster_info ''' RETURN = r''' @@ -178,6 +192,25 @@ info: "used_memory_scripts_human": "0B", "used_memory_startup": 791264 } +cluster: + description: The default set of cluster information sections U(https://redis.io/commands/cluster-info). + returned: success if O(cluster=true) + version_added: 9.1.0 + type: dict + sample: { + "cluster_state": ok, + "cluster_slots_assigned": 16384, + "cluster_slots_ok": 16384, + "cluster_slots_pfail": 0, + "cluster_slots_fail": 0, + "cluster_known_nodes": 6, + "cluster_size": 3, + "cluster_current_epoch": 6, + "cluster_my_epoch": 2, + "cluster_stats_messages_sent": 1483972, + "cluster_stats_messages_received": 1483968, + "total_cluster_links_buffer_limit_exceeded": 0 + } ''' import traceback @@ -202,14 +235,19 @@ def redis_client(**client_params): # Module execution. def main(): + module_args = dict( + cluster=dict(type='bool', default=False), + ) + module_args.update(redis_auth_argument_spec(tls_default=False)) module = AnsibleModule( - argument_spec=redis_auth_argument_spec(tls_default=False), + argument_spec=module_args, supports_check_mode=True, ) fail_imports(module, module.params['tls']) redis_params = redis_auth_params(module) + cluster = module.params['cluster'] # Connect and check client = redis_client(**redis_params) @@ -219,7 +257,13 @@ def main(): module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc()) info = client.info() - module.exit_json(changed=False, info=info) + + result = dict(changed=False, info=info) + + if cluster: + result['cluster_info'] = client.execute_command('CLUSTER INFO') + + module.exit_json(**result) if __name__ == '__main__': diff --git a/ansible_collections/community/general/plugins/modules/udm_user.py b/ansible_collections/community/general/plugins/modules/udm_user.py index dcbf0ec85..5a2e09049 100644 --- a/ansible_collections/community/general/plugins/modules/udm_user.py +++ b/ansible_collections/community/general/plugins/modules/udm_user.py @@ -20,6 +20,12 @@ description: - "This module allows to manage posix users on a univention corporate server (UCS). It uses the python API of the UCS to create a new object or edit it." +notes: + - This module does B(not) work with Python 3.13 or newer. It uses the deprecated L(crypt Python module, + https://docs.python.org/3.12/library/crypt.html) from the Python standard library, which was removed + from Python 3.13. 
+requirements: + - Python 3.12 or earlier extends_documentation_fragment: - community.general.attributes attributes: @@ -324,10 +330,10 @@ EXAMPLES = ''' RETURN = '''# ''' -import crypt from datetime import date, timedelta +import traceback -from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.basic import AnsibleModule, missing_required_lib from ansible_collections.community.general.plugins.module_utils.univention_umc import ( umc_module_for_add, umc_module_for_edit, @@ -335,6 +341,15 @@ from ansible_collections.community.general.plugins.module_utils.univention_umc i base_dn, ) +try: + import crypt +except ImportError: + HAS_CRYPT = False + CRYPT_IMPORT_ERROR = traceback.format_exc() +else: + HAS_CRYPT = True + CRYPT_IMPORT_ERROR = None + def main(): expiry = date.strftime(date.today() + timedelta(days=365), "%Y-%m-%d") @@ -451,6 +466,13 @@ def main(): ('state', 'present', ['firstname', 'lastname', 'password']) ]) ) + + if not HAS_CRYPT: + module.fail_json( + msg=missing_required_lib('crypt (part of Python 3.13 standard library)'), + exception=CRYPT_IMPORT_ERROR, + ) + username = module.params['username'] position = module.params['position'] ou = module.params['ou'] diff --git a/ansible_collections/community/general/plugins/plugin_utils/keys_filter.py b/ansible_collections/community/general/plugins/plugin_utils/keys_filter.py new file mode 100644 index 000000000..94234a15d --- /dev/null +++ b/ansible_collections/community/general/plugins/plugin_utils/keys_filter.py @@ -0,0 +1,141 @@ +# Copyright (c) 2024 Vladimir Botka <vbotka@gmail.com> +# Copyright (c) 2024 Felix Fontein <felix@fontein.de> +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re + +from ansible.errors import AnsibleFilterError +from ansible.module_utils.six import string_types +from ansible.module_utils.common._collections_compat import Mapping, Sequence + + +def _keys_filter_params(data, matching_parameter): + """test parameters: + * data must be a list of dictionaries. All keys must be strings. + * matching_parameter is member of a list. + """ + + mp = matching_parameter + ml = ['equal', 'starts_with', 'ends_with', 'regex'] + + if not isinstance(data, Sequence): + msg = "First argument must be a list. %s is %s" + raise AnsibleFilterError(msg % (data, type(data))) + + for elem in data: + if not isinstance(elem, Mapping): + msg = "The data items must be dictionaries. %s is %s" + raise AnsibleFilterError(msg % (elem, type(elem))) + + for elem in data: + if not all(isinstance(item, string_types) for item in elem.keys()): + msg = "Top level keys must be strings. keys: %s" + raise AnsibleFilterError(msg % elem.keys()) + + if mp not in ml: + msg = "The matching_parameter must be one of %s. matching_parameter=%s" + raise AnsibleFilterError(msg % (ml, mp)) + + return + + +def _keys_filter_target_str(target, matching_parameter): + """ + Test: + * target is a non-empty string or list. + * If target is list all items are strings. + * target is a string or list with single string if matching_parameter=regex. + Convert target and return: + * tuple of unique target items, or + * tuple with single item, or + * compiled regex if matching_parameter=regex. + """ + + if not isinstance(target, Sequence): + msg = "The target must be a string or a list. target is %s." 
+ raise AnsibleFilterError(msg % type(target)) + + if len(target) == 0: + msg = "The target can't be empty." + raise AnsibleFilterError(msg) + + if isinstance(target, list): + for elem in target: + if not isinstance(elem, string_types): + msg = "The target items must be strings. %s is %s" + raise AnsibleFilterError(msg % (elem, type(elem))) + + if matching_parameter == 'regex': + if isinstance(target, string_types): + r = target + else: + if len(target) > 1: + msg = "Single item is required in the target list if matching_parameter=regex." + raise AnsibleFilterError(msg) + else: + r = target[0] + try: + tt = re.compile(r) + except re.error: + msg = "The target must be a valid regex if matching_parameter=regex. target is %s" + raise AnsibleFilterError(msg % r) + elif isinstance(target, string_types): + tt = (target, ) + else: + tt = tuple(set(target)) + + return tt + + +def _keys_filter_target_dict(target, matching_parameter): + """ + Test: + * target is a list of dictionaries with attributes 'after' and 'before'. + * Attributes 'before' must be valid regex if matching_parameter=regex. + * Otherwise, the attributes 'before' must be strings. + Convert target and return: + * iterator that aggregates attributes 'before' and 'after', or + * iterator that aggregates compiled regex of attributes 'before' and 'after' if matching_parameter=regex. + """ + + if not isinstance(target, list): + msg = "The target must be a list. target is %s." + raise AnsibleFilterError(msg % (target, type(target))) + + if len(target) == 0: + msg = "The target can't be empty." + raise AnsibleFilterError(msg) + + for elem in target: + if not isinstance(elem, Mapping): + msg = "The target items must be dictionaries. %s is %s" + raise AnsibleFilterError(msg % (elem, type(elem))) + if not all(k in elem for k in ('before', 'after')): + msg = "All dictionaries in target must include attributes: after, before." + raise AnsibleFilterError(msg) + if not isinstance(elem['before'], string_types): + msg = "The attributes before must be strings. %s is %s" + raise AnsibleFilterError(msg % (elem['before'], type(elem['before']))) + if not isinstance(elem['after'], string_types): + msg = "The attributes after must be strings. %s is %s" + raise AnsibleFilterError(msg % (elem['after'], type(elem['after']))) + + before = [d['before'] for d in target] + after = [d['after'] for d in target] + + if matching_parameter == 'regex': + try: + tr = map(re.compile, before) + tz = list(zip(tr, after)) + except re.error: + msg = ("The attributes before must be valid regex if matching_parameter=regex." + " Not all items are valid regex in: %s") + raise AnsibleFilterError(msg % before) + else: + tz = list(zip(before, after)) + + return tz |
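Taken together, the helpers above normalise the two inputs that the keys filters accept: _keys_filter_params() validates the list of dictionaries and the matching_parameter, while _keys_filter_target_str() returns either a tuple of plain strings or a compiled regex. A minimal sketch, using a hypothetical filter function (the collection's real filters built on these helpers may differ in detail), of how the pieces fit together:

    # Hypothetical example only: keep the keys of each dictionary that match the target.
    def _example_keep_keys(data, target, matching_parameter='equal'):
        _keys_filter_params(data, matching_parameter)              # validate data and option
        tt = _keys_filter_target_str(target, matching_parameter)   # tuple of strings, or compiled regex

        def _keep(key):
            if matching_parameter == 'equal':
                return key in tt
            if matching_parameter == 'starts_with':
                return key.startswith(tt)
            if matching_parameter == 'ends_with':
                return key.endswith(tt)
            return tt.match(key) is not None                       # 'regex'; match semantics assumed

        return [dict((k, v) for k, v in d.items() if _keep(k)) for d in data]

    # _example_keep_keys([{'name': 'web1', 'port': 80}], target=['na'], matching_parameter='starts_with')
    # -> [{'name': 'web1'}]

_keys_filter_target_dict() follows the same pattern for filters that map old key names to new ones: it returns (before, after) pairs, with the before part pre-compiled when matching_parameter=regex.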