summaryrefslogtreecommitdiffstats
path: root/ansible_collections/amazon/aws/tests/unit
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-18 05:52:22 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-18 05:52:22 +0000
commit38b7c80217c4e72b1d8988eb1e60bb6e77334114 (patch)
tree356e9fd3762877d07cde52d21e77070aeff7e789 /ansible_collections/amazon/aws/tests/unit
parentAdding upstream version 7.7.0+dfsg. (diff)
downloadansible-38b7c80217c4e72b1d8988eb1e60bb6e77334114.tar.xz
ansible-38b7c80217c4e72b1d8988eb1e60bb6e77334114.zip
Adding upstream version 9.4.0+dfsg.upstream/9.4.0+dfsg
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'ansible_collections/amazon/aws/tests/unit')
-rw-r--r--ansible_collections/amazon/aws/tests/unit/__init__.py (renamed from ansible_collections/amazon/aws/tests/unit/compat/__init__.py)0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/compat/builtins.py33
-rw-r--r--ansible_collections/amazon/aws/tests/unit/compat/mock.py122
-rw-r--r--ansible_collections/amazon/aws/tests/unit/compat/unittest.py38
-rw-r--r--ansible_collections/amazon/aws/tests/unit/constraints.txt6
-rw-r--r--ansible_collections/amazon/aws/tests/unit/mock/loader.py116
-rw-r--r--ansible_collections/amazon/aws/tests/unit/mock/path.py8
-rw-r--r--ansible_collections/amazon/aws/tests/unit/mock/procenv.py90
-rw-r--r--ansible_collections/amazon/aws/tests/unit/mock/vault_helper.py39
-rw-r--r--ansible_collections/amazon/aws/tests/unit/mock/yaml_helper.py124
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/arn/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/arn/test_is_outpost_arn.py3
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/arn/test_parse_aws_arn.py301
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/arn/test_validate_aws_arn.py217
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/botocore/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_aws_region.py199
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_boto3_conn.py114
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_connection_info.py345
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_is_boto3_error_code.py95
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_is_boto3_error_message.py79
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_merge_botocore_config.py68
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_normalize_boto3_result.py53
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_sdk_versions.py250
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/cloud/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_backoff_iterator.py3
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_cloud_retry.py69
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_decorator_generation.py74
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_retries_found.py29
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_retry_func.py16
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/conftest.py38
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/elbv2/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/elbv2/test_listener_rules.py740
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/elbv2/test_prune.py171
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/errors/aws_error_handler/test_common_handler.py87
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/errors/aws_error_handler/test_deletion_handler.py125
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/errors/aws_error_handler/test_list_handler.py128
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/exceptions/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/exceptions/test_exceptions.py101
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/iam/test_iam_error_handler.py131
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/iam/test_validate_iam_identifiers.py83
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/modules/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_fail_json_aws.py24
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_minimal_versions.py39
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_passthrough.py209
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_require_at_least.py82
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/policy/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_canonicalize.py38
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_compare_policies.py278
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_py3cmp.py40
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_simple_hashable_policy.py28
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_sort_json_policy_dict.py61
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/retries/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/retries/test_awsretry.py46
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/retries/test_botocore_exception_maybe.py18
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/retries/test_retry_wrapper.py267
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/test_acm.py348
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/test_cloudfront_facts.py487
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/test_elbv2.py109
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/test_get_aws_account_id.py (renamed from ansible_collections/amazon/aws/tests/unit/module_utils/test_iam.py)215
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/test_rds.py281
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/test_s3.py335
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/test_tagging.py148
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/test_tower.py21
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/transformation/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_ansible_dict_to_boto3_filter_list.py48
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_map_complex_type.py83
-rw-r--r--ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_scrub_none_parameters.py182
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugin_utils/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugin_utils/base/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugin_utils/base/test_plugin.py177
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/test_boto3_conn_plugin.py131
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/test_get_aws_region.py84
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/test_get_connection_info.py83
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugin_utils/connection/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugin_utils/connection/test_connection_base.py49
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugin_utils/inventory/test_inventory_base.py67
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugin_utils/inventory/test_inventory_clients.py103
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugin_utils/lookup/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugin_utils/lookup/test_lookup_base.py48
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/inventory/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/inventory/test_aws_ec2.py815
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/inventory/test_aws_rds.py674
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/lookup/test_secretsmanager_secret.py348
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/conftest.py21
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_eip/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_eip/test_check_is_instance.py65
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_instance/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_instance/test_build_run_instance_spec.py140
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_instance/test_determine_iam_role.py65
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_expand_rules.py240
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_formatting.py239
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_get_target_from_rule.py99
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_validate_ip.py85
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_validate_rule.py100
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/fixtures/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/fixtures/certs/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_backup_restore_job_info.py146
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_cloudformation.py142
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_ami.py364
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_ami_info.py224
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_eni_info.py108
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_import_image.py224
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_key.py353
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_metadata_facts.py101
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_security_group.py86
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_snapshot_info.py128
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_vpc_dhcp_option.py83
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_kms_key.py16
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_lambda_layer.py340
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_lambda_layer_info.py314
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_rds_instance_info.py121
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/test_s3_object.py169
-rw-r--r--ansible_collections/amazon/aws/tests/unit/plugins/modules/utils.py26
-rw-r--r--ansible_collections/amazon/aws/tests/unit/utils/__init__.py0
-rw-r--r--ansible_collections/amazon/aws/tests/unit/utils/amazon_placebo_fixtures.py118
121 files changed, 11130 insertions, 3048 deletions
diff --git a/ansible_collections/amazon/aws/tests/unit/compat/__init__.py b/ansible_collections/amazon/aws/tests/unit/__init__.py
index e69de29bb..e69de29bb 100644
--- a/ansible_collections/amazon/aws/tests/unit/compat/__init__.py
+++ b/ansible_collections/amazon/aws/tests/unit/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/compat/builtins.py b/ansible_collections/amazon/aws/tests/unit/compat/builtins.py
deleted file mode 100644
index 349d310e8..000000000
--- a/ansible_collections/amazon/aws/tests/unit/compat/builtins.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-#
-# Compat for python2.7
-#
-
-# One unittest needs to import builtins via __import__() so we need to have
-# the string that represents it
-try:
- import __builtin__ # pylint: disable=unused-import
-except ImportError:
- BUILTINS = 'builtins'
-else:
- BUILTINS = '__builtin__'
diff --git a/ansible_collections/amazon/aws/tests/unit/compat/mock.py b/ansible_collections/amazon/aws/tests/unit/compat/mock.py
deleted file mode 100644
index 0972cd2e8..000000000
--- a/ansible_collections/amazon/aws/tests/unit/compat/mock.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-'''
-Compat module for Python3.x's unittest.mock module
-'''
-import sys
-
-# Python 2.7
-
-# Note: Could use the pypi mock library on python3.x as well as python2.x. It
-# is the same as the python3 stdlib mock library
-
-try:
- # Allow wildcard import because we really do want to import all of mock's
- # symbols into this compat shim
- # pylint: disable=wildcard-import,unused-wildcard-import
- from unittest.mock import *
-except ImportError:
- # Python 2
- # pylint: disable=wildcard-import,unused-wildcard-import
- try:
- from mock import *
- except ImportError:
- print('You need the mock library installed on python2.x to run tests')
-
-
-# Prior to 3.4.4, mock_open cannot handle binary read_data
-if sys.version_info >= (3,) and sys.version_info < (3, 4, 4):
- file_spec = None
-
- def _iterate_read_data(read_data):
- # Helper for mock_open:
- # Retrieve lines from read_data via a generator so that separate calls to
- # readline, read, and readlines are properly interleaved
- sep = b'\n' if isinstance(read_data, bytes) else '\n'
- data_as_list = [l + sep for l in read_data.split(sep)]
-
- if data_as_list[-1] == sep:
- # If the last line ended in a newline, the list comprehension will have an
- # extra entry that's just a newline. Remove this.
- data_as_list = data_as_list[:-1]
- else:
- # If there wasn't an extra newline by itself, then the file being
- # emulated doesn't have a newline to end the last line remove the
- # newline that our naive format() added
- data_as_list[-1] = data_as_list[-1][:-1]
-
- for line in data_as_list:
- yield line
-
- def mock_open(mock=None, read_data=''):
- """
- A helper function to create a mock to replace the use of `open`. It works
- for `open` called directly or used as a context manager.
-
- The `mock` argument is the mock object to configure. If `None` (the
- default) then a `MagicMock` will be created for you, with the API limited
- to methods or attributes available on standard file handles.
-
- `read_data` is a string for the `read` methoddline`, and `readlines` of the
- file handle to return. This is an empty string by default.
- """
- def _readlines_side_effect(*args, **kwargs):
- if handle.readlines.return_value is not None:
- return handle.readlines.return_value
- return list(_data)
-
- def _read_side_effect(*args, **kwargs):
- if handle.read.return_value is not None:
- return handle.read.return_value
- return type(read_data)().join(_data)
-
- def _readline_side_effect():
- if handle.readline.return_value is not None:
- while True:
- yield handle.readline.return_value
- for line in _data:
- yield line
-
- global file_spec
- if file_spec is None:
- import _io
- file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
-
- if mock is None:
- mock = MagicMock(name='open', spec=open)
-
- handle = MagicMock(spec=file_spec)
- handle.__enter__.return_value = handle
-
- _data = _iterate_read_data(read_data)
-
- handle.write.return_value = None
- handle.read.return_value = None
- handle.readline.return_value = None
- handle.readlines.return_value = None
-
- handle.read.side_effect = _read_side_effect
- handle.readline.side_effect = _readline_side_effect()
- handle.readlines.side_effect = _readlines_side_effect
-
- mock.return_value = handle
- return mock
diff --git a/ansible_collections/amazon/aws/tests/unit/compat/unittest.py b/ansible_collections/amazon/aws/tests/unit/compat/unittest.py
deleted file mode 100644
index 98f08ad6a..000000000
--- a/ansible_collections/amazon/aws/tests/unit/compat/unittest.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-'''
-Compat module for Python2.7's unittest module
-'''
-
-import sys
-
-# Allow wildcard import because we really do want to import all of
-# unittests's symbols into this compat shim
-# pylint: disable=wildcard-import,unused-wildcard-import
-if sys.version_info < (2, 7):
- try:
- # Need unittest2 on python2.6
- from unittest2 import *
- except ImportError:
- print('You need unittest2 installed on python2.6.x to run tests')
-else:
- from unittest import *
diff --git a/ansible_collections/amazon/aws/tests/unit/constraints.txt b/ansible_collections/amazon/aws/tests/unit/constraints.txt
index cd546e7c2..5708323f1 100644
--- a/ansible_collections/amazon/aws/tests/unit/constraints.txt
+++ b/ansible_collections/amazon/aws/tests/unit/constraints.txt
@@ -1,7 +1,7 @@
# Specifically run tests against the oldest versions that we support
-boto3==1.18.0
-botocore==1.21.0
+botocore==1.29.0
+boto3==1.26.0
# AWS CLI has `botocore==` dependencies, provide the one that matches botocore
# to avoid needing to download over a years worth of awscli wheels.
-awscli==1.20.0
+awscli==1.27.0
diff --git a/ansible_collections/amazon/aws/tests/unit/mock/loader.py b/ansible_collections/amazon/aws/tests/unit/mock/loader.py
deleted file mode 100644
index 00a584127..000000000
--- a/ansible_collections/amazon/aws/tests/unit/mock/loader.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-
-from ansible.errors import AnsibleParserError
-from ansible.parsing.dataloader import DataLoader
-from ansible.module_utils._text import to_bytes, to_text
-
-
-class DictDataLoader(DataLoader):
-
- def __init__(self, file_mapping=None):
- file_mapping = {} if file_mapping is None else file_mapping
- assert type(file_mapping) == dict
-
- super(DictDataLoader, self).__init__()
-
- self._file_mapping = file_mapping
- self._build_known_directories()
- self._vault_secrets = None
-
- def load_from_file(self, path, cache=True, unsafe=False):
- path = to_text(path)
- if path in self._file_mapping:
- return self.load(self._file_mapping[path], path)
- return None
-
- # TODO: the real _get_file_contents returns a bytestring, so we actually convert the
- # unicode/text it's created with to utf-8
- def _get_file_contents(self, file_name):
- file_name = to_text(file_name)
- if file_name in self._file_mapping:
- return (to_bytes(self._file_mapping[file_name]), False)
- else:
- raise AnsibleParserError("file not found: %s" % file_name)
-
- def path_exists(self, path):
- path = to_text(path)
- return path in self._file_mapping or path in self._known_directories
-
- def is_file(self, path):
- path = to_text(path)
- return path in self._file_mapping
-
- def is_directory(self, path):
- path = to_text(path)
- return path in self._known_directories
-
- def list_directory(self, path):
- ret = []
- path = to_text(path)
- for x in (list(self._file_mapping.keys()) + self._known_directories):
- if x.startswith(path):
- if os.path.dirname(x) == path:
- ret.append(os.path.basename(x))
- return ret
-
- def is_executable(self, path):
- # FIXME: figure out a way to make paths return true for this
- return False
-
- def _add_known_directory(self, directory):
- if directory not in self._known_directories:
- self._known_directories.append(directory)
-
- def _build_known_directories(self):
- self._known_directories = []
- for path in self._file_mapping:
- dirname = os.path.dirname(path)
- while dirname not in ('/', ''):
- self._add_known_directory(dirname)
- dirname = os.path.dirname(dirname)
-
- def push(self, path, content):
- rebuild_dirs = False
- if path not in self._file_mapping:
- rebuild_dirs = True
-
- self._file_mapping[path] = content
-
- if rebuild_dirs:
- self._build_known_directories()
-
- def pop(self, path):
- if path in self._file_mapping:
- del self._file_mapping[path]
- self._build_known_directories()
-
- def clear(self):
- self._file_mapping = dict()
- self._known_directories = []
-
- def get_basedir(self):
- return os.getcwd()
-
- def set_vault_secrets(self, vault_secrets):
- self._vault_secrets = vault_secrets
diff --git a/ansible_collections/amazon/aws/tests/unit/mock/path.py b/ansible_collections/amazon/aws/tests/unit/mock/path.py
deleted file mode 100644
index 8de2aec25..000000000
--- a/ansible_collections/amazon/aws/tests/unit/mock/path.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from ansible_collections.amazon.aws.tests.unit.compat.mock import MagicMock
-from ansible.utils.path import unfrackpath
-
-
-mock_unfrackpath_noop = MagicMock(spec_set=unfrackpath, side_effect=lambda x, *args, **kwargs: x)
diff --git a/ansible_collections/amazon/aws/tests/unit/mock/procenv.py b/ansible_collections/amazon/aws/tests/unit/mock/procenv.py
deleted file mode 100644
index 273959e4b..000000000
--- a/ansible_collections/amazon/aws/tests/unit/mock/procenv.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# (c) 2016, Matt Davis <mdavis@ansible.com>
-# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import sys
-import json
-
-from contextlib import contextmanager
-from io import BytesIO, StringIO
-from ansible_collections.amazon.aws.tests.unit.compat import unittest
-from ansible.module_utils.six import PY3
-from ansible.module_utils._text import to_bytes
-
-
-@contextmanager
-def swap_stdin_and_argv(stdin_data='', argv_data=tuple()):
- """
- context manager that temporarily masks the test runner's values for stdin and argv
- """
- real_stdin = sys.stdin
- real_argv = sys.argv
-
- if PY3:
- fake_stream = StringIO(stdin_data)
- fake_stream.buffer = BytesIO(to_bytes(stdin_data))
- else:
- fake_stream = BytesIO(to_bytes(stdin_data))
-
- try:
- sys.stdin = fake_stream
- sys.argv = argv_data
-
- yield
- finally:
- sys.stdin = real_stdin
- sys.argv = real_argv
-
-
-@contextmanager
-def swap_stdout():
- """
- context manager that temporarily replaces stdout for tests that need to verify output
- """
- old_stdout = sys.stdout
-
- if PY3:
- fake_stream = StringIO()
- else:
- fake_stream = BytesIO()
-
- try:
- sys.stdout = fake_stream
-
- yield fake_stream
- finally:
- sys.stdout = old_stdout
-
-
-class ModuleTestCase(unittest.TestCase):
- def setUp(self, module_args=None):
- if module_args is None:
- module_args = {'_ansible_remote_tmp': '/tmp', '_ansible_keep_remote_files': False}
-
- args = json.dumps(dict(ANSIBLE_MODULE_ARGS=module_args))
-
- # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually
- self.stdin_swap = swap_stdin_and_argv(stdin_data=args)
- self.stdin_swap.__enter__()
-
- def tearDown(self):
- # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually
- self.stdin_swap.__exit__(None, None, None)
diff --git a/ansible_collections/amazon/aws/tests/unit/mock/vault_helper.py b/ansible_collections/amazon/aws/tests/unit/mock/vault_helper.py
deleted file mode 100644
index dcce9c784..000000000
--- a/ansible_collections/amazon/aws/tests/unit/mock/vault_helper.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from ansible.module_utils._text import to_bytes
-
-from ansible.parsing.vault import VaultSecret
-
-
-class TextVaultSecret(VaultSecret):
- '''A secret piece of text. ie, a password. Tracks text encoding.
-
- The text encoding of the text may not be the default text encoding so
- we keep track of the encoding so we encode it to the same bytes.'''
-
- def __init__(self, text, encoding=None, errors=None, _bytes=None):
- super(TextVaultSecret, self).__init__()
- self.text = text
- self.encoding = encoding or 'utf-8'
- self._bytes = _bytes
- self.errors = errors or 'strict'
-
- @property
- def bytes(self):
- '''The text encoded with encoding, unless we specifically set _bytes.'''
- return self._bytes or to_bytes(self.text, encoding=self.encoding, errors=self.errors)
diff --git a/ansible_collections/amazon/aws/tests/unit/mock/yaml_helper.py b/ansible_collections/amazon/aws/tests/unit/mock/yaml_helper.py
deleted file mode 100644
index 1ef172159..000000000
--- a/ansible_collections/amazon/aws/tests/unit/mock/yaml_helper.py
+++ /dev/null
@@ -1,124 +0,0 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import io
-import yaml
-
-from ansible.module_utils.six import PY3
-from ansible.parsing.yaml.loader import AnsibleLoader
-from ansible.parsing.yaml.dumper import AnsibleDumper
-
-
-class YamlTestUtils(object):
- """Mixin class to combine with a unittest.TestCase subclass."""
- def _loader(self, stream):
- """Vault related tests will want to override this.
-
- Vault cases should setup a AnsibleLoader that has the vault password."""
- return AnsibleLoader(stream)
-
- def _dump_stream(self, obj, stream, dumper=None):
- """Dump to a py2-unicode or py3-string stream."""
- if PY3:
- return yaml.dump(obj, stream, Dumper=dumper)
- else:
- return yaml.dump(obj, stream, Dumper=dumper, encoding=None)
-
- def _dump_string(self, obj, dumper=None):
- """Dump to a py2-unicode or py3-string"""
- if PY3:
- return yaml.dump(obj, Dumper=dumper)
- else:
- return yaml.dump(obj, Dumper=dumper, encoding=None)
-
- def _dump_load_cycle(self, obj):
- # Each pass though a dump or load revs the 'generation'
- # obj to yaml string
- string_from_object_dump = self._dump_string(obj, dumper=AnsibleDumper)
-
- # wrap a stream/file like StringIO around that yaml
- stream_from_object_dump = io.StringIO(string_from_object_dump)
- loader = self._loader(stream_from_object_dump)
- # load the yaml stream to create a new instance of the object (gen 2)
- obj_2 = loader.get_data()
-
- # dump the gen 2 objects directory to strings
- string_from_object_dump_2 = self._dump_string(obj_2,
- dumper=AnsibleDumper)
-
- # The gen 1 and gen 2 yaml strings
- self.assertEqual(string_from_object_dump, string_from_object_dump_2)
- # the gen 1 (orig) and gen 2 py object
- self.assertEqual(obj, obj_2)
-
- # again! gen 3... load strings into py objects
- stream_3 = io.StringIO(string_from_object_dump_2)
- loader_3 = self._loader(stream_3)
- obj_3 = loader_3.get_data()
-
- string_from_object_dump_3 = self._dump_string(obj_3, dumper=AnsibleDumper)
-
- self.assertEqual(obj, obj_3)
- # should be transitive, but...
- self.assertEqual(obj_2, obj_3)
- self.assertEqual(string_from_object_dump, string_from_object_dump_3)
-
- def _old_dump_load_cycle(self, obj):
- '''Dump the passed in object to yaml, load it back up, dump again, compare.'''
- stream = io.StringIO()
-
- yaml_string = self._dump_string(obj, dumper=AnsibleDumper)
- self._dump_stream(obj, stream, dumper=AnsibleDumper)
-
- yaml_string_from_stream = stream.getvalue()
-
- # reset stream
- stream.seek(0)
-
- loader = self._loader(stream)
- # loader = AnsibleLoader(stream, vault_password=self.vault_password)
- obj_from_stream = loader.get_data()
-
- stream_from_string = io.StringIO(yaml_string)
- loader2 = self._loader(stream_from_string)
- # loader2 = AnsibleLoader(stream_from_string, vault_password=self.vault_password)
- obj_from_string = loader2.get_data()
-
- stream_obj_from_stream = io.StringIO()
- stream_obj_from_string = io.StringIO()
-
- if PY3:
- yaml.dump(obj_from_stream, stream_obj_from_stream, Dumper=AnsibleDumper)
- yaml.dump(obj_from_stream, stream_obj_from_string, Dumper=AnsibleDumper)
- else:
- yaml.dump(obj_from_stream, stream_obj_from_stream, Dumper=AnsibleDumper, encoding=None)
- yaml.dump(obj_from_stream, stream_obj_from_string, Dumper=AnsibleDumper, encoding=None)
-
- yaml_string_stream_obj_from_stream = stream_obj_from_stream.getvalue()
- yaml_string_stream_obj_from_string = stream_obj_from_string.getvalue()
-
- stream_obj_from_stream.seek(0)
- stream_obj_from_string.seek(0)
-
- if PY3:
- yaml_string_obj_from_stream = yaml.dump(obj_from_stream, Dumper=AnsibleDumper)
- yaml_string_obj_from_string = yaml.dump(obj_from_string, Dumper=AnsibleDumper)
- else:
- yaml_string_obj_from_stream = yaml.dump(obj_from_stream, Dumper=AnsibleDumper, encoding=None)
- yaml_string_obj_from_string = yaml.dump(obj_from_string, Dumper=AnsibleDumper, encoding=None)
-
- assert yaml_string == yaml_string_obj_from_stream
- assert yaml_string == yaml_string_obj_from_stream == yaml_string_obj_from_string
- assert (yaml_string == yaml_string_obj_from_stream == yaml_string_obj_from_string == yaml_string_stream_obj_from_stream ==
- yaml_string_stream_obj_from_string)
- assert obj == obj_from_stream
- assert obj == obj_from_string
- assert obj == yaml_string_obj_from_stream
- assert obj == yaml_string_obj_from_string
- assert obj == obj_from_stream == obj_from_string == yaml_string_obj_from_stream == yaml_string_obj_from_string
- return {'obj': obj,
- 'yaml_string': yaml_string,
- 'yaml_string_from_stream': yaml_string_from_stream,
- 'obj_from_stream': obj_from_stream,
- 'obj_from_string': obj_from_string,
- 'yaml_string_obj_from_string': yaml_string_obj_from_string}
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/__init__.py b/ansible_collections/amazon/aws/tests/unit/module_utils/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/arn/__init__.py b/ansible_collections/amazon/aws/tests/unit/module_utils/arn/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/arn/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/arn/test_is_outpost_arn.py b/ansible_collections/amazon/aws/tests/unit/module_utils/arn/test_is_outpost_arn.py
index 7c2e21eb2..8b92c4cca 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/arn/test_is_outpost_arn.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/arn/test_is_outpost_arn.py
@@ -3,9 +3,6 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import pytest
from ansible_collections.amazon.aws.plugins.module_utils.arn import is_outpost_arn
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/arn/test_parse_aws_arn.py b/ansible_collections/amazon/aws/tests/unit/module_utils/arn/test_parse_aws_arn.py
index 87dada4a9..cc4b40576 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/arn/test_parse_aws_arn.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/arn/test_parse_aws_arn.py
@@ -3,82 +3,263 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import pytest
from ansible_collections.amazon.aws.plugins.module_utils.arn import parse_aws_arn
arn_bad_values = [
- ("arn:aws:outpost:us-east-1: 123456789012:outpost/op-1234567890abcdef0"),
- ("arn:aws:out post:us-east-1:123456789012:outpost/op-1234567890abcdef0"),
- ("arn:aws:outpost:us east 1:123456789012:outpost/op-1234567890abcdef0"),
- ("invalid:aws:outpost:us-east-1:123456789012:outpost/op-1234567890abcdef0"),
- ("arn:junk:outpost:us-east-1:123456789012:outpost/op-1234567890abcdef0"),
- ("arn:aws:outpost:us-east-1:junk:outpost/op-1234567890abcdef0"),
+ "arn:aws:outpost:us-east-1: 123456789012:outpost/op-1234567890abcdef0",
+ "arn:aws:out post:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ "arn:aws:outpost:us east 1:123456789012:outpost/op-1234567890abcdef0",
+ "invalid:aws:outpost:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ "arn:junk:outpost:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ "arn:aws:outpost:us-east-1:junk:outpost/op-1234567890abcdef0",
]
arn_good_values = [
# Play about with partition name in valid ways
- dict(partition='aws', service='outpost', region='us-east-1', account_id='123456789012',
- resource='outpost/op-1234567890abcdef0'),
- dict(partition='aws-gov', service='outpost', region='us-gov-east-1', account_id='123456789012',
- resource='outpost/op-1234567890abcdef0'),
- dict(partition='aws-cn', service='outpost', region='us-east-1', account_id='123456789012',
- resource='outpost/op-1234567890abcdef0'),
+ dict(
+ partition="aws",
+ service="outpost",
+ region="us-east-1",
+ account_id="123456789012",
+ resource="outpost/op-1234567890abcdef0",
+ resource_type="outpost",
+ resource_id="op-1234567890abcdef0",
+ ),
+ dict(
+ partition="aws-gov",
+ service="outpost",
+ region="us-gov-east-1",
+ account_id="123456789012",
+ resource="outpost/op-1234567890abcdef0",
+ resource_type="outpost",
+ resource_id="op-1234567890abcdef0",
+ ),
+ dict(
+ partition="aws-cn",
+ service="outpost",
+ region="us-east-1",
+ account_id="123456789012",
+ resource="outpost/op-1234567890abcdef0",
+ resource_type="outpost",
+ resource_id="op-1234567890abcdef0",
+ ),
# Start the account ID with 0s, it's a 12 digit *string*, if someone treats
# it as an integer the leading 0s can disappear.
- dict(partition='aws-cn', service='outpost', region='us-east-1', account_id='000123000123',
- resource='outpost/op-1234567890abcdef0'),
+ dict(
+ partition="aws-cn",
+ service="outpost",
+ region="us-east-1",
+ account_id="000123000123",
+ resource="outpost/op-1234567890abcdef0",
+ resource_type="outpost",
+ resource_id="op-1234567890abcdef0",
+ ),
# S3 doesn't "need" region/account_id as bucket names are globally unique
- dict(partition='aws', service='s3', region='', account_id='', resource='bucket/object'),
+ dict(
+ partition="aws",
+ service="s3",
+ region="",
+ account_id="",
+ resource="bucket/object",
+ resource_type="bucket",
+ resource_id="object",
+ ),
# IAM is a 'global' service, so the ARNs don't have regions
- dict(partition='aws', service='iam', region='', account_id='123456789012',
- resource='policy/foo/bar/PolicyName'),
- dict(partition='aws', service='iam', region='', account_id='123456789012',
- resource='instance-profile/ExampleProfile'),
- dict(partition='aws', service='iam', region='', account_id='123456789012', resource='root'),
+ dict(
+ partition="aws",
+ service="iam",
+ region="",
+ account_id="123456789012",
+ resource="policy/foo/bar/PolicyName",
+ resource_type="policy",
+ resource_id="foo/bar/PolicyName",
+ ),
+ dict(
+ partition="aws",
+ service="iam",
+ region="",
+ account_id="123456789012",
+ resource="instance-profile/ExampleProfile",
+ resource_type="instance-profile",
+ resource_id="ExampleProfile",
+ ),
+ dict(
+ partition="aws",
+ service="iam",
+ region="",
+ account_id="123456789012",
+ resource="root",
+ resource_type=None,
+ resource_id="root",
+ ),
# Some examples with different regions
- dict(partition='aws', service='sqs', region='eu-west-3', account_id='123456789012',
- resource='example-queue'),
- dict(partition='aws', service='sqs', region='us-gov-east-1', account_id='123456789012',
- resource='example-queue'),
- dict(partition='aws', service='sqs', region='sa-east-1', account_id='123456789012',
- resource='example-queue'),
- dict(partition='aws', service='sqs', region='ap-northeast-2', account_id='123456789012',
- resource='example-queue'),
- dict(partition='aws', service='sqs', region='ca-central-1', account_id='123456789012',
- resource='example-queue'),
+ dict(
+ partition="aws",
+ service="sqs",
+ region="eu-west-3",
+ account_id="123456789012",
+ resource="example-queue",
+ resource_type=None,
+ resource_id="example-queue",
+ ),
+ dict(
+ partition="aws",
+ service="sqs",
+ region="us-gov-east-1",
+ account_id="123456789012",
+ resource="example-queue",
+ resource_type=None,
+ resource_id="example-queue",
+ ),
+ dict(
+ partition="aws",
+ service="sqs",
+ region="sa-east-1",
+ account_id="123456789012",
+ resource="example-queue",
+ resource_type=None,
+ resource_id="example-queue",
+ ),
+ dict(
+ partition="aws",
+ service="sqs",
+ region="ap-northeast-2",
+ account_id="123456789012",
+ resource="example-queue",
+ resource_type=None,
+ resource_id="example-queue",
+ ),
+ dict(
+ partition="aws",
+ service="sqs",
+ region="ca-central-1",
+ account_id="123456789012",
+ resource="example-queue",
+ resource_type=None,
+ resource_id="example-queue",
+ ),
# Some more unusual service names
- dict(partition='aws', service='network-firewall', region='us-east-1', account_id='123456789012',
- resource='stateful-rulegroup/ExampleDomainList'),
- dict(partition='aws', service='resource-groups', region='us-east-1', account_id='123456789012',
- resource='group/group-name'),
+ dict(
+ partition="aws",
+ service="network-firewall",
+ region="us-east-1",
+ account_id="123456789012",
+ resource="stateful-rulegroup/ExampleDomainList",
+ resource_type="stateful-rulegroup",
+ resource_id="ExampleDomainList",
+ ),
+ dict(
+ partition="aws",
+ service="resource-groups",
+ region="us-east-1",
+ account_id="123456789012",
+ resource="group/group-name",
+ resource_type="group",
+ resource_id="group-name",
+ ),
# A special case for resources AWS curate
- dict(partition='aws', service='network-firewall', region='us-east-1', account_id='aws-managed',
- resource='stateful-rulegroup/BotNetCommandAndControlDomainsActionOrder'),
- dict(partition='aws', service='iam', region='', account_id='aws',
- resource='policy/AWSDirectConnectReadOnlyAccess'),
+ dict(
+ partition="aws",
+ service="network-firewall",
+ region="us-east-1",
+ account_id="aws-managed",
+ resource="stateful-rulegroup/BotNetCommandAndControlDomainsActionOrder",
+ resource_type="stateful-rulegroup",
+ resource_id="BotNetCommandAndControlDomainsActionOrder",
+ ),
+ dict(
+ partition="aws",
+ service="iam",
+ region="",
+ account_id="aws",
+ resource="policy/AWSDirectConnectReadOnlyAccess",
+ resource_type="policy",
+ resource_id="AWSDirectConnectReadOnlyAccess",
+ ),
# Examples merged in from test_arn.py
- dict(partition="aws-us-gov", service="iam", region="", account_id="0123456789",
- resource="role/foo-role"),
- dict(partition="aws", service='iam', region="", account_id="123456789012",
- resource="user/dev/*"),
- dict(partition="aws", service="iam", region="", account_id="123456789012",
- resource="user:test"),
- dict(partition="aws-cn", service="iam", region="", account_id="123456789012",
- resource="user:test"),
- dict(partition="aws", service="iam", region="", account_id="123456789012",
- resource="user"),
- dict(partition="aws", service="s3", region="", account_id="",
- resource="my_corporate_bucket/*"),
- dict(partition="aws", service="s3", region="", account_id="",
- resource="my_corporate_bucket/Development/*"),
- dict(partition="aws", service="rds", region="es-east-1", account_id="000000000000",
- resource="snapshot:rds:my-db-snapshot"),
- dict(partition="aws", service="cloudformation", region="us-east-1", account_id="012345678901",
- resource="changeSet/Ansible-StackName-c6884247ede41eb0"),
+ dict(
+ partition="aws-us-gov",
+ service="iam",
+ region="",
+ account_id="0123456789",
+ resource="role/foo-role",
+ resource_type="role",
+ resource_id="foo-role",
+ ),
+ dict(
+ partition="aws",
+ service="iam",
+ region="",
+ account_id="123456789012",
+ resource="user/dev/*",
+ resource_type="user",
+ resource_id="dev/*",
+ ),
+ dict(
+ partition="aws",
+ service="iam",
+ region="",
+ account_id="123456789012",
+ resource="user:test",
+ resource_type="user",
+ resource_id="test",
+ ),
+ dict(
+ partition="aws-cn",
+ service="iam",
+ region="",
+ account_id="123456789012",
+ resource="user:test",
+ resource_type="user",
+ resource_id="test",
+ ),
+ dict(
+ partition="aws",
+ service="iam",
+ region="",
+ account_id="123456789012",
+ resource="user",
+ resource_type=None,
+ resource_id="user",
+ ),
+ dict(
+ partition="aws",
+ service="s3",
+ region="",
+ account_id="",
+ resource="my_corporate_bucket/*",
+ resource_type="my_corporate_bucket",
+ resource_id="*",
+ ),
+ dict(
+ partition="aws",
+ service="s3",
+ region="",
+ account_id="",
+ resource="my_corporate_bucket/Development/*",
+ resource_type="my_corporate_bucket",
+ resource_id="Development/*",
+ ),
+ dict(
+ partition="aws",
+ service="rds",
+ region="es-east-1",
+ account_id="000000000000",
+ resource="snapshot:rds:my-db-snapshot",
+ resource_type="snapshot",
+ resource_id="rds:my-db-snapshot",
+ ),
+ dict(
+ partition="aws",
+ service="cloudformation",
+ region="us-east-1",
+ account_id="012345678901",
+ resource="changeSet/Ansible-StackName-c6884247ede41eb0",
+ resource_type="changeSet",
+ resource_id="Ansible-StackName-c6884247ede41eb0",
+ ),
]
@@ -91,5 +272,5 @@ def test_parse_aws_arn_bad_values(arn):
@pytest.mark.parametrize("result", arn_good_values)
def test_parse_aws_arn_good_values(result):
# Something of a cheat, but build the ARN from the result we expect
- arn = 'arn:{partition}:{service}:{region}:{account_id}:{resource}'.format(**result)
+ arn = "arn:{partition}:{service}:{region}:{account_id}:{resource}".format(**result)
assert parse_aws_arn(arn) == result
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/arn/test_validate_aws_arn.py b/ansible_collections/amazon/aws/tests/unit/module_utils/arn/test_validate_aws_arn.py
new file mode 100644
index 000000000..d730ee637
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/arn/test_validate_aws_arn.py
@@ -0,0 +1,217 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+import pytest
+
+from ansible_collections.amazon.aws.plugins.module_utils.arn import validate_aws_arn
+
+arn_test_inputs = [
+ # Just test it's a valid ARN
+ ("arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0", True, None),
+ # Bad ARN
+ ("arn:was:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0", False, None),
+ # Individual options
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ True,
+ {"partition": "aws"},
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ False,
+ {"partition": "aws-cn"},
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ True,
+ {"service": "outposts"},
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ False,
+ {"service": "iam"},
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ True,
+ {"region": "us-east-1"},
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ False,
+ {"region": "us-east-2"},
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ True,
+ {"account_id": "123456789012"},
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ False,
+ {"account_id": "111111111111"},
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ True,
+ {"resource": "outpost/op-1234567890abcdef0"},
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ False,
+ {"resource": "outpost/op-11111111111111111"},
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ True,
+ {"resource_type": "outpost"},
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ False,
+ {"resource_type": "notpost"},
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ True,
+ {"resource_id": "op-1234567890abcdef0"},
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ False,
+ {"resource_id": "op-11111111111111111"},
+ ),
+ (
+ "arn:aws:states:us-west-2:123456789012:stateMachine:HelloWorldStateMachine",
+ True,
+ {"resource_type": "stateMachine"},
+ ),
+ (
+ "arn:aws:states:us-west-2:123456789012:stateMachine:HelloWorldStateMachine",
+ False,
+ {"resource_type": "nopeMachine"},
+ ),
+ (
+ "arn:aws:states:us-west-2:123456789012:stateMachine:HelloWorldStateMachine",
+ True,
+ {"resource_id": "HelloWorldStateMachine"},
+ ),
+ (
+ "arn:aws:states:us-west-2:123456789012:stateMachine:HelloWorldStateMachine",
+ False,
+ {"resource_id": "CruelWorldStateMachine"},
+ ),
+ # All options
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ True,
+ {
+ "partition": "aws",
+ "service": "outposts",
+ "region": "us-east-1",
+ "account_id": "123456789012",
+ "resource": "outpost/op-1234567890abcdef0",
+ "resource_type": "outpost",
+ "resource_id": "op-1234567890abcdef0",
+ },
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ False,
+ {
+ "partition": "aws-cn",
+ "service": "outposts",
+ "region": "us-east-1",
+ "account_id": "123456789012",
+ "resource": "outpost/op-1234567890abcdef0",
+ "resource_type": "outpost",
+ "resource_id": "op-1234567890abcdef0",
+ },
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ False,
+ {
+ "partition": "aws",
+ "service": "iam",
+ "region": "us-east-1",
+ "account_id": "123456789012",
+ "resource": "outpost/op-1234567890abcdef0",
+ "resource_type": "outpost",
+ "resource_id": "op-1234567890abcdef0",
+ },
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ False,
+ {
+ "partition": "aws",
+ "service": "outposts",
+ "region": "us-east-2",
+ "account_id": "123456789012",
+ "resource": "outpost/op-1234567890abcdef0",
+ "resource_type": "outpost",
+ "resource_id": "op-1234567890abcdef0",
+ },
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ False,
+ {
+ "partition": "aws",
+ "service": "outposts",
+ "region": "us-east-1",
+ "account_id": "111111111111",
+ "resource": "outpost/op-1234567890abcdef0",
+ "resource_type": "outpost",
+ "resource_id": "op-1234567890abcdef0",
+ },
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ False,
+ {
+ "partition": "aws",
+ "service": "outposts",
+ "region": "us-east-1",
+ "account_id": "123456789012",
+ "resource": "outpost/op-11111111111111111",
+ "resource_type": "outpost",
+ "resource_id": "op-1234567890abcdef0",
+ },
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ False,
+ {
+ "partition": "aws",
+ "service": "outposts",
+ "region": "us-east-1",
+ "account_id": "123456789012",
+ "resource": "outpost/op-1234567890abcdef0",
+ "resource_type": "notpost",
+ "resource_id": "op-1234567890abcdef0",
+ },
+ ),
+ (
+ "arn:aws:outposts:us-east-1:123456789012:outpost/op-1234567890abcdef0",
+ False,
+ {
+ "partition": "aws",
+ "service": "outposts",
+ "region": "us-east-1",
+ "account_id": "123456789012",
+ "resource": "outpost/op-1234567890abcdef0",
+ "resource_type": "outpost",
+ "resource_id": "op-11111111111111111",
+ },
+ ),
+]
+
+
+@pytest.mark.parametrize("arn, result, kwargs", arn_test_inputs)
+def test_validate_aws_arn(arn, result, kwargs):
+ kwargs = kwargs or {}
+ assert validate_aws_arn(arn, **kwargs) == result
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/__init__.py b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_aws_region.py b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_aws_region.py
new file mode 100644
index 000000000..f36967b44
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_aws_region.py
@@ -0,0 +1,199 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import sentinel
+
+import pytest
+
+try:
+ import botocore
+except ImportError:
+ # Handled by HAS_BOTO3
+ pass
+
+import ansible_collections.amazon.aws.plugins.module_utils.botocore as utils_botocore
+from ansible_collections.amazon.aws.plugins.module_utils.exceptions import AnsibleBotocoreError
+
+
+class FailException(Exception):
+ pass
+
+
+@pytest.fixture
+def aws_module(monkeypatch):
+ aws_module = MagicMock()
+ aws_module.fail_json.side_effect = FailException()
+ aws_module.fail_json_aws.side_effect = FailException()
+ monkeypatch.setattr(aws_module, "params", sentinel.MODULE_PARAMS)
+ return aws_module
+
+
+@pytest.fixture
+def fake_botocore(monkeypatch):
+ # Note: this isn't a monkey-patched real-botocore, this is a complete fake.
+ fake_session = MagicMock()
+ fake_session.get_config_variable.return_value = sentinel.BOTO3_REGION
+ fake_session_module = MagicMock()
+ fake_session_module.Session.return_value = fake_session
+ fake_botocore = MagicMock()
+ monkeypatch.setattr(fake_botocore, "session", fake_session_module)
+ # Patch exceptions back in
+ monkeypatch.setattr(fake_botocore, "exceptions", botocore.exceptions)
+
+ return fake_botocore
+
+
+@pytest.fixture
+def botocore_utils(monkeypatch):
+ return utils_botocore
+
+
+###############################################################
+# module_utils.botocore.get_aws_region
+###############################################################
+def test_get_aws_region_simple(monkeypatch, aws_module, botocore_utils):
+ region_method = MagicMock(name="_aws_region")
+ monkeypatch.setattr(botocore_utils, "_aws_region", region_method)
+ region_method.return_value = sentinel.RETURNED_REGION
+
+ assert botocore_utils.get_aws_region(aws_module) is sentinel.RETURNED_REGION
+ passed_args = region_method.call_args
+ assert passed_args == call(sentinel.MODULE_PARAMS)
+ # args[0]
+ assert passed_args[0][0] is sentinel.MODULE_PARAMS
+
+
+def test_get_aws_region_exception_nested(monkeypatch, aws_module, botocore_utils):
+ region_method = MagicMock(name="_aws_region")
+ monkeypatch.setattr(botocore_utils, "_aws_region", region_method)
+
+ exception_nested = AnsibleBotocoreError(message=sentinel.ERROR_MSG, exception=sentinel.ERROR_EX)
+ region_method.side_effect = exception_nested
+
+ with pytest.raises(FailException):
+ assert botocore_utils.get_aws_region(aws_module)
+
+ passed_args = region_method.call_args
+ assert passed_args == call(sentinel.MODULE_PARAMS)
+ # call_args[0] == positional args
+ assert passed_args[0][0] is sentinel.MODULE_PARAMS
+
+ fail_args = aws_module.fail_json.call_args
+ assert fail_args == call(msg=sentinel.ERROR_MSG, exception=sentinel.ERROR_EX)
+ # call_args[1] == kwargs
+ assert fail_args[1]["msg"] is sentinel.ERROR_MSG
+ assert fail_args[1]["exception"] is sentinel.ERROR_EX
+
+
+def test_get_aws_region_exception_msg(monkeypatch, aws_module, botocore_utils):
+ region_method = MagicMock(name="_aws_region")
+ monkeypatch.setattr(botocore_utils, "_aws_region", region_method)
+
+ exception_nested = AnsibleBotocoreError(message=sentinel.ERROR_MSG)
+ region_method.side_effect = exception_nested
+
+ with pytest.raises(FailException):
+ assert botocore_utils.get_aws_region(aws_module)
+
+ passed_args = region_method.call_args
+ assert passed_args == call(sentinel.MODULE_PARAMS)
+ # call_args[0] == positional args
+ assert passed_args[0][0] is sentinel.MODULE_PARAMS
+
+ fail_args = aws_module.fail_json.call_args
+ assert fail_args == call(msg=sentinel.ERROR_MSG)
+ # call_args[1] == kwargs
+ assert fail_args[1]["msg"] is sentinel.ERROR_MSG
+
+
+###############################################################
+# module_utils.botocore._aws_region
+###############################################################
+def test_aws_region_no_boto(monkeypatch, botocore_utils):
+ monkeypatch.setattr(botocore_utils, "HAS_BOTO3", False)
+ monkeypatch.setattr(botocore_utils, "BOTO3_IMP_ERR", sentinel.BOTO3_IMPORT_EXCEPTION)
+
+ assert botocore_utils._aws_region(dict(region=sentinel.PARAM_REGION)) is sentinel.PARAM_REGION
+
+ with pytest.raises(AnsibleBotocoreError) as e:
+ utils_botocore._aws_region(dict())
+ assert "boto3" in e.value.message
+ assert "botocore" in e.value.message
+ assert e.value.exception is sentinel.BOTO3_IMPORT_EXCEPTION
+
+
+def test_aws_region_no_profile(monkeypatch, botocore_utils, fake_botocore):
+ monkeypatch.setattr(botocore_utils, "botocore", fake_botocore)
+ fake_session_module = fake_botocore.session
+ fake_session = fake_session_module.Session(sentinel.RETRIEVAL)
+
+ assert botocore_utils._aws_region(dict(region=sentinel.PARAM_REGION)) is sentinel.PARAM_REGION
+ assert fake_session_module.Session.call_args == call(sentinel.RETRIEVAL)
+
+ assert botocore_utils._aws_region(dict()) is sentinel.BOTO3_REGION
+ assert fake_session_module.Session.call_args == call(profile=None)
+ assert fake_session.get_config_variable.call_args == call("region")
+
+
+def test_aws_region_none_profile(monkeypatch, botocore_utils, fake_botocore):
+ monkeypatch.setattr(botocore_utils, "botocore", fake_botocore)
+ fake_session_module = fake_botocore.session
+ fake_session = fake_session_module.Session(sentinel.RETRIEVAL)
+
+ assert botocore_utils._aws_region(dict(region=sentinel.PARAM_REGION, profile=None)) is sentinel.PARAM_REGION
+ assert fake_session_module.Session.call_args == call(sentinel.RETRIEVAL)
+
+ assert utils_botocore._aws_region(dict(profile=None)) is sentinel.BOTO3_REGION
+ assert fake_session_module.Session.call_args == call(profile=None)
+ assert fake_session.get_config_variable.call_args == call("region")
+
+
+def test_aws_region_empty_profile(monkeypatch, botocore_utils, fake_botocore):
+ monkeypatch.setattr(botocore_utils, "botocore", fake_botocore)
+ fake_session_module = fake_botocore.session
+ fake_session = fake_session_module.Session(sentinel.RETRIEVAL)
+
+ assert botocore_utils._aws_region(dict(region=sentinel.PARAM_REGION, profile="")) is sentinel.PARAM_REGION
+ assert fake_session_module.Session.call_args == call(sentinel.RETRIEVAL)
+
+ assert utils_botocore._aws_region(dict(profile="")) is sentinel.BOTO3_REGION
+ assert fake_session_module.Session.call_args == call(profile=None)
+ assert fake_session.get_config_variable.call_args == call("region")
+
+
+def test_aws_region_with_profile(monkeypatch, botocore_utils, fake_botocore):
+ monkeypatch.setattr(botocore_utils, "botocore", fake_botocore)
+ fake_session_module = fake_botocore.session
+ fake_session = fake_session_module.Session(sentinel.RETRIEVAL)
+
+ assert (
+ botocore_utils._aws_region(dict(region=sentinel.PARAM_REGION, profile=sentinel.PARAM_PROFILE))
+ is sentinel.PARAM_REGION
+ )
+ assert fake_session_module.Session.call_args == call(sentinel.RETRIEVAL)
+
+ assert utils_botocore._aws_region(dict(profile=sentinel.PARAM_PROFILE)) is sentinel.BOTO3_REGION
+ assert fake_session_module.Session.call_args == call(profile=sentinel.PARAM_PROFILE)
+ assert fake_session.get_config_variable.call_args == call("region")
+
+
+def test_aws_region_bad_profile(monkeypatch, botocore_utils, fake_botocore):
+ not_found_exception = botocore.exceptions.ProfileNotFound(profile=sentinel.ERROR_PROFILE)
+
+ monkeypatch.setattr(botocore_utils, "botocore", fake_botocore)
+ fake_session_module = fake_botocore.session
+
+ assert (
+ botocore_utils._aws_region(dict(region=sentinel.PARAM_REGION, profile=sentinel.PARAM_PROFILE))
+ is sentinel.PARAM_REGION
+ )
+ # We've always just returned a blank region if we're passed a bad profile.
+ # However, it's worth noting however that once someone tries to build a connection passing the
+ # bad profile name they'll see the ProfileNotFound exception
+ fake_session_module.Session.side_effect = not_found_exception
+ assert utils_botocore._aws_region(dict(profile=sentinel.PARAM_PROFILE)) is None
+ assert fake_session_module.Session.call_args == call(profile=sentinel.PARAM_PROFILE)
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_boto3_conn.py b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_boto3_conn.py
new file mode 100644
index 000000000..d9b19b725
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_boto3_conn.py
@@ -0,0 +1,114 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+try:
+ import botocore
+except ImportError:
+ pass
+
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import sentinel
+
+import pytest
+
+import ansible_collections.amazon.aws.plugins.module_utils.botocore as utils_botocore
+
+
+class FailException(Exception):
+ pass
+
+
+@pytest.fixture
+def aws_module(monkeypatch):
+ aws_module = MagicMock()
+ aws_module.fail_json.side_effect = FailException()
+ monkeypatch.setattr(aws_module, "_name", sentinel.MODULE_NAME)
+ return aws_module
+
+
+@pytest.fixture
+def botocore_utils(monkeypatch):
+ return utils_botocore
+
+
+###############################################################
+# module_utils.botocore.boto3_conn
+###############################################################
+def test_boto3_conn_success(monkeypatch, aws_module, botocore_utils):
+ connection_method = MagicMock(name="_boto3_conn")
+ monkeypatch.setattr(botocore_utils, "_boto3_conn", connection_method)
+ connection_method.return_value = sentinel.RETURNED_CONNECTION
+
+ assert botocore_utils.boto3_conn(aws_module) is sentinel.RETURNED_CONNECTION
+ passed_args = connection_method.call_args
+ assert passed_args == call(conn_type=None, resource=None, region=None, endpoint=None)
+
+ result = botocore_utils.boto3_conn(
+ aws_module,
+ conn_type=sentinel.PARAM_CONNTYPE,
+ resource=sentinel.PARAM_RESOURCE,
+ region=sentinel.PARAM_REGION,
+ endpoint=sentinel.PARAM_ENDPOINT,
+ extra_arg=sentinel.PARAM_EXTRA,
+ )
+ assert result is sentinel.RETURNED_CONNECTION
+ passed_args = connection_method.call_args
+ assert passed_args == call(
+ conn_type=sentinel.PARAM_CONNTYPE,
+ resource=sentinel.PARAM_RESOURCE,
+ region=sentinel.PARAM_REGION,
+ endpoint=sentinel.PARAM_ENDPOINT,
+ extra_arg=sentinel.PARAM_EXTRA,
+ )
+
+
+@pytest.mark.parametrize(
+ "failure, custom_error",
+ [
+ (
+ ValueError(sentinel.VALUE_ERROR),
+ "Couldn't connect to AWS: sentinel.VALUE_ERROR",
+ ),
+ (
+ botocore.exceptions.ProfileNotFound(
+ profile=sentinel.PROFILE_ERROR,
+ ),
+ None,
+ ),
+ (
+ botocore.exceptions.PartialCredentialsError(
+ provider=sentinel.CRED_ERROR_PROV,
+ cred_var=sentinel.CRED_ERROR_VAR,
+ ),
+ None,
+ ),
+ (
+ botocore.exceptions.NoCredentialsError(),
+ None,
+ ),
+ (
+ botocore.exceptions.ConfigParseError(path=sentinel.PARSE_ERROR),
+ None,
+ ),
+ (
+ botocore.exceptions.NoRegionError(),
+ "The sentinel.MODULE_NAME module requires a region and none was found",
+ ),
+ ],
+)
+def test_boto3_conn_exception(monkeypatch, aws_module, botocore_utils, failure, custom_error):
+ connection_method = MagicMock(name="_boto3_conn")
+ monkeypatch.setattr(botocore_utils, "_boto3_conn", connection_method)
+ connection_method.side_effect = failure
+
+ if custom_error is None:
+ custom_error = str(failure)
+
+ with pytest.raises(FailException):
+ botocore_utils.boto3_conn(aws_module)
+
+ fail_args = aws_module.fail_json.call_args
+ assert custom_error in fail_args[1]["msg"]
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_connection_info.py b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_connection_info.py
new file mode 100644
index 000000000..5cdf45f90
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_connection_info.py
@@ -0,0 +1,345 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from copy import deepcopy
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import sentinel
+
+import pytest
+
+try:
+ import botocore
+except ImportError:
+ # Handled by HAS_BOTO3
+ pass
+
+import ansible_collections.amazon.aws.plugins.module_utils.botocore as utils_botocore
+from ansible_collections.amazon.aws.plugins.module_utils.exceptions import AnsibleBotocoreError
+
+CREDENTIAL_MAP = dict(
+ access_key="aws_access_key_id",
+ secret_key="aws_secret_access_key",
+ session_token="aws_session_token",
+)
+BLANK_BOTO_PARAMS = dict(aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None, verify=None)
+
+
+class FailException(Exception):
+ pass
+
+
+@pytest.fixture
+def aws_module(monkeypatch):
+ aws_module = MagicMock()
+ aws_module.fail_json.side_effect = FailException()
+ aws_module.fail_json_aws.side_effect = FailException()
+ monkeypatch.setattr(aws_module, "params", sentinel.MODULE_PARAMS)
+ return aws_module
+
+
+@pytest.fixture
+def fake_botocore(monkeypatch):
+ # Note: this isn't a monkey-patched real-botocore, this is a complete fake.
+ fake_session = MagicMock()
+ fake_session.get_config_variable.return_value = sentinel.BOTO3_REGION
+ fake_session_module = MagicMock()
+ fake_session_module.Session.return_value = fake_session
+ fake_config_module = MagicMock()
+ fake_config_module.Config.return_value = sentinel.BOTO3_CONFIG
+ fake_botocore = MagicMock()
+ monkeypatch.setattr(fake_botocore, "session", fake_session_module)
+ monkeypatch.setattr(fake_botocore, "config", fake_config_module)
+ # Patch exceptions in
+ monkeypatch.setattr(fake_botocore, "exceptions", botocore.exceptions)
+
+ return fake_botocore
+
+
+@pytest.fixture
+def botocore_utils(monkeypatch):
+ region_method = MagicMock(name="_aws_region")
+ monkeypatch.setattr(utils_botocore, "_aws_region", region_method)
+ region_method.return_value = sentinel.RETURNED_REGION
+ return utils_botocore
+
+
+###############################################################
+# module_utils.botocore.get_aws_connection_info
+###############################################################
+def test_get_aws_connection_info_simple(monkeypatch, aws_module, botocore_utils):
+ connection_info_method = MagicMock(name="_aws_connection_info")
+ monkeypatch.setattr(botocore_utils, "_aws_connection_info", connection_info_method)
+ connection_info_method.return_value = sentinel.RETURNED_INFO
+
+ assert botocore_utils.get_aws_connection_info(aws_module) is sentinel.RETURNED_INFO
+ passed_args = connection_info_method.call_args
+ assert passed_args == call(sentinel.MODULE_PARAMS)
+ # args[0]
+ assert passed_args[0][0] is sentinel.MODULE_PARAMS
+
+
+def test_get_aws_connection_info_exception_nested(monkeypatch, aws_module, botocore_utils):
+ connection_info_method = MagicMock(name="_aws_connection_info")
+ monkeypatch.setattr(botocore_utils, "_aws_connection_info", connection_info_method)
+
+ exception_nested = AnsibleBotocoreError(message=sentinel.ERROR_MSG, exception=sentinel.ERROR_EX)
+ connection_info_method.side_effect = exception_nested
+
+ with pytest.raises(FailException):
+ botocore_utils.get_aws_connection_info(aws_module)
+
+ passed_args = connection_info_method.call_args
+ assert passed_args == call(sentinel.MODULE_PARAMS)
+ # call_args[0] == positional args
+ assert passed_args[0][0] is sentinel.MODULE_PARAMS
+
+ fail_args = aws_module.fail_json.call_args
+ assert fail_args == call(msg=sentinel.ERROR_MSG, exception=sentinel.ERROR_EX)
+ # call_args[1] == kwargs
+ assert fail_args[1]["msg"] is sentinel.ERROR_MSG
+ assert fail_args[1]["exception"] is sentinel.ERROR_EX
+
+
+def test_get_aws_connection_info_exception_msg(monkeypatch, aws_module, botocore_utils):
+ connection_info_method = MagicMock(name="_aws_connection_info")
+ monkeypatch.setattr(botocore_utils, "_aws_connection_info", connection_info_method)
+
+ exception_nested = AnsibleBotocoreError(message=sentinel.ERROR_MSG)
+ connection_info_method.side_effect = exception_nested
+
+ with pytest.raises(FailException):
+ botocore_utils.get_aws_connection_info(aws_module)
+
+ passed_args = connection_info_method.call_args
+ assert passed_args == call(sentinel.MODULE_PARAMS)
+ # call_args[0] == positional args
+ assert passed_args[0][0] is sentinel.MODULE_PARAMS
+
+ fail_args = aws_module.fail_json.call_args
+ assert fail_args == call(msg=sentinel.ERROR_MSG)
+ # call_args[1] == kwargs
+ assert fail_args[1]["msg"] is sentinel.ERROR_MSG
+
+
+###############################################################
+# module_utils.botocore._get_aws_connection_info
+###############################################################
+@pytest.mark.parametrize("param_name", ["access_key", "secret_key", "session_token"])
+def test_aws_connection_info_single_cred(monkeypatch, botocore_utils, param_name):
+ options = {param_name: sentinel.PARAM_CRED, "profile": sentinel.PARAM_PROFILE}
+ blank_params = deepcopy(BLANK_BOTO_PARAMS)
+ boto_param_name = CREDENTIAL_MAP[param_name]
+ expected_params = deepcopy(blank_params)
+ expected_params[boto_param_name] = sentinel.PARAM_CRED
+
+ # profile + cred is explicitly not supported
+ with pytest.raises(AnsibleBotocoreError, match="Passing both"):
+ botocore_utils._aws_connection_info(options)
+
+ # However a blank/empty profile is ok.
+ options["profile"] = None
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is None
+ assert boto_params == expected_params
+ assert boto_params[boto_param_name] is sentinel.PARAM_CRED
+
+ options["profile"] = ""
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is None
+ assert boto_params == expected_params
+ assert boto_params[boto_param_name] is sentinel.PARAM_CRED
+
+ del options["profile"]
+
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is None
+ assert boto_params == expected_params
+ assert boto_params[boto_param_name] is sentinel.PARAM_CRED
+
+ options[param_name] = None
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is None
+ assert boto_params == blank_params
+ assert boto_params[boto_param_name] is None
+
+ options[param_name] = ""
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is None
+ assert boto_params == blank_params
+ assert boto_params[boto_param_name] is None
+
+ options[param_name] = b"Originally bytes String"
+ expected_params[boto_param_name] = "Originally bytes String" # Converted to string
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is None
+ assert boto_params == expected_params
+
+
+@pytest.mark.parametrize(
+ "options, expected_validate",
+ [
+ (dict(validate_certs=True, aws_ca_bundle=sentinel.PARAM_BUNDLE), sentinel.PARAM_BUNDLE),
+ (dict(validate_certs=False, aws_ca_bundle=sentinel.PARAM_BUNDLE), False),
+ (dict(validate_certs=True, aws_ca_bundle=""), True),
+ (dict(validate_certs=False, aws_ca_bundle=""), False),
+ (dict(validate_certs=True, aws_ca_bundle=None), True),
+ (dict(validate_certs=False, aws_ca_bundle=None), False),
+ (dict(validate_certs=True, aws_ca_bundle=b"Originally bytes String"), "Originally bytes String"),
+ ],
+)
+def test_aws_connection_info_validation(monkeypatch, botocore_utils, options, expected_validate):
+ expected_params = deepcopy(BLANK_BOTO_PARAMS)
+ expected_params["verify"] = expected_validate
+
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is None
+ assert boto_params == expected_params
+    assert boto_params["verify"] is expected_validate
+
+
+def test_aws_connection_info_profile(monkeypatch, botocore_utils):
+ expected_params = deepcopy(BLANK_BOTO_PARAMS)
+
+ options = {"profile": ""}
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is None
+ assert boto_params == expected_params
+
+ options = {"profile": None}
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is None
+ assert boto_params == expected_params
+
+ options = {"profile": sentinel.PARAM_PROFILE}
+ expected_params["profile_name"] = sentinel.PARAM_PROFILE
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is None
+ assert boto_params == expected_params
+ assert boto_params["profile_name"] is sentinel.PARAM_PROFILE
+
+ options = {"profile": b"Originally bytes String"}
+ expected_params["profile_name"] = "Originally bytes String"
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is None
+ assert boto_params == expected_params
+
+
+def test_aws_connection_info_config(monkeypatch, botocore_utils, fake_botocore):
+ monkeypatch.setattr(botocore_utils, "botocore", fake_botocore)
+ expected_params = deepcopy(BLANK_BOTO_PARAMS)
+
+ options = {}
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is None
+ assert boto_params == expected_params
+ assert fake_botocore.config.Config.called is False
+
+ options = {"aws_config": None}
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is None
+ assert boto_params == expected_params
+ assert fake_botocore.config.Config.called is False
+
+ options = {"aws_config": {"example_config_item": sentinel.PARAM_CONFIG}}
+ expected_params["aws_config"] = sentinel.BOTO3_CONFIG
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is None
+ assert boto_params == expected_params
+ assert fake_botocore.config.Config.called is True
+ config_args = fake_botocore.config.Config.call_args
+ assert config_args == call(example_config_item=sentinel.PARAM_CONFIG)
+
+
+def test_aws_connection_info_endpoint_url(monkeypatch, botocore_utils):
+ expected_params = deepcopy(BLANK_BOTO_PARAMS)
+
+ options = {"endpoint_url": sentinel.PARAM_ENDPOINT}
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is sentinel.PARAM_ENDPOINT
+ assert boto_params == expected_params
+
+
+def test_aws_connection_info_complex(monkeypatch, botocore_utils, fake_botocore):
+ monkeypatch.setattr(botocore_utils, "botocore", fake_botocore)
+
+ expected_params = dict(
+ aws_access_key_id=sentinel.PARAM_ACCESS,
+ aws_secret_access_key=sentinel.PARAM_SECRET,
+ aws_session_token=sentinel.PARAM_SESSION,
+ verify=sentinel.PARAM_BUNDLE,
+ aws_config=sentinel.BOTO3_CONFIG,
+ )
+ options = dict(
+ endpoint_url=sentinel.PARAM_ENDPOINT,
+ access_key=sentinel.PARAM_ACCESS,
+ secret_key=sentinel.PARAM_SECRET,
+ session_token=sentinel.PARAM_SESSION,
+ validate_certs=True,
+ aws_ca_bundle=sentinel.PARAM_BUNDLE,
+ aws_config={"example_config_item": sentinel.PARAM_CONFIG},
+ )
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is sentinel.PARAM_ENDPOINT
+ assert boto_params == expected_params
+ assert fake_botocore.config.Config.called is True
+ config_args = fake_botocore.config.Config.call_args
+ assert config_args == call(example_config_item=sentinel.PARAM_CONFIG)
+ assert botocore_utils._aws_region.called is True
+ region_args = botocore_utils._aws_region.call_args
+ assert region_args == call(options)
+ assert region_args[0][0] is options
+
+
+def test_aws_connection_info_complex_profile(monkeypatch, botocore_utils, fake_botocore):
+ monkeypatch.setattr(botocore_utils, "botocore", fake_botocore)
+
+ expected_params = dict(
+ aws_access_key_id=None,
+ aws_secret_access_key=None,
+ aws_session_token=None,
+ profile_name=sentinel.PARAM_PROFILE,
+ verify=sentinel.PARAM_BUNDLE,
+ aws_config=sentinel.BOTO3_CONFIG,
+ )
+ options = dict(
+ endpoint_url=sentinel.PARAM_ENDPOINT,
+ access_key=None,
+ secret_key=None,
+ session_token=None,
+ profile=sentinel.PARAM_PROFILE,
+ validate_certs=True,
+ aws_ca_bundle=sentinel.PARAM_BUNDLE,
+ aws_config={"example_config_item": sentinel.PARAM_CONFIG},
+ )
+ region, endpoint_url, boto_params = botocore_utils._aws_connection_info(options)
+
+ assert region is sentinel.RETURNED_REGION
+ assert endpoint_url is sentinel.PARAM_ENDPOINT
+ assert boto_params == expected_params
+ assert fake_botocore.config.Config.called is True
+ config_args = fake_botocore.config.Config.call_args
+ assert config_args == call(example_config_item=sentinel.PARAM_CONFIG)
+ assert botocore_utils._aws_region.called is True
+ region_args = botocore_utils._aws_region.call_args
+ assert region_args == call(options)
+ assert region_args[0][0] is options
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_is_boto3_error_code.py b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_is_boto3_error_code.py
index 627ae4cb3..9f3e4194b 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_is_boto3_error_code.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_is_boto3_error_code.py
@@ -4,9 +4,6 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import pytest
try:
@@ -15,63 +12,63 @@ except ImportError:
# Handled by HAS_BOTO3
pass
-from ansible_collections.amazon.aws.plugins.module_utils.botocore import is_boto3_error_code
from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
+from ansible_collections.amazon.aws.plugins.module_utils.botocore import is_boto3_error_code
if not HAS_BOTO3:
pytestmark = pytest.mark.skip("test_is_boto3_error_code.py requires the python modules 'boto3' and 'botocore'")
-class TestIsBoto3ErrorCode():
-
+class TestIsBoto3ErrorCode:
def _make_denied_exception(self):
return botocore.exceptions.ClientError(
{
"Error": {
"Code": "AccessDenied",
- "Message": "User: arn:aws:iam::123456789012:user/ExampleUser "
- + "is not authorized to perform: iam:GetUser on resource: user ExampleUser"
+ "Message": (
+ "User: arn:aws:iam::123456789012:user/ExampleUser "
+ + "is not authorized to perform: iam:GetUser on resource: user ExampleUser"
+ ),
},
- "ResponseMetadata": {
- "RequestId": "01234567-89ab-cdef-0123-456789abcdef"
- }
- }, 'getUser')
+ "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
+ },
+ "getUser",
+ )
def _make_unexpected_exception(self):
return botocore.exceptions.ClientError(
{
- "Error": {
- "Code": "SomeThingWentWrong",
- "Message": "Boom!"
- },
- "ResponseMetadata": {
- "RequestId": "01234567-89ab-cdef-0123-456789abcdef"
- }
- }, 'someCall')
+ "Error": {"Code": "SomeThingWentWrong", "Message": "Boom!"},
+ "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
+ },
+ "someCall",
+ )
def _make_encoded_exception(self):
return botocore.exceptions.ClientError(
{
"Error": {
"Code": "PermissionDenied",
- "Message": "You are not authorized to perform this operation. Encoded authorization failure message: " +
- "fEwXX6llx3cClm9J4pURgz1XPnJPrYexEbrJcLhFkwygMdOgx_-aEsj0LqRM6Kxt2HVI6prUhDwbJqBo9U2V7iRKZ" +
- "T6ZdJvHH02cXmD0Jwl5vrTsf0PhBcWYlH5wl2qME7xTfdolEUr4CzumCiti7ETiO-RDdHqWlasBOW5bWsZ4GSpPdU" +
- "06YAX0TfwVBs48uU5RpCHfz1uhSzez-3elbtp9CmTOHLt5pzJodiovccO55BQKYLPtmJcs6S9YLEEogmpI4Cb1D26" +
- "fYahDh51jEmaohPnW5pb1nQe2yPEtuIhtRzNjhFCOOMwY5DBzNsymK-Gj6eJLm7FSGHee4AHLU_XmZMe_6bcLAiOx" +
- "6Zdl65Kdd0hLcpwVxyZMi27HnYjAdqRlV3wuCW2PkhAW14qZQLfiuHZDEwnPe2PBGSlFcCmkQvJvX-YLoA7Uyc2wf" +
- "NX5RJm38STwfiJSkQaNDhHKTWKiLOsgY4Gze6uZoG7zOcFXFRyaA4cbMmI76uyBO7j-9uQUCtBYqYto8x_9CUJcxI" +
- "VC5SPG_C1mk-WoDMew01f0qy-bNaCgmJ9TOQGd08FyuT1SaMpCC0gX6mHuOnEgkFw3veBIowMpp9XcM-yc42fmIOp" +
- "FOdvQO6uE9p55Qc-uXvsDTTvT3A7EeFU8a_YoAIt9UgNYM6VTvoprLz7dBI_P6C-bdPPZCY2amm-dJNVZelT6TbJB" +
- "H_Vxh0fzeiSUBersy_QzB0moc-vPWgnB-IkgnYLV-4L3K0L2"
+ "Message": (
+ "You are not authorized to perform this operation. Encoded authorization failure message: "
+ + "fEwXX6llx3cClm9J4pURgz1XPnJPrYexEbrJcLhFkwygMdOgx_-aEsj0LqRM6Kxt2HVI6prUhDwbJqBo9U2V7iRKZ"
+ + "T6ZdJvHH02cXmD0Jwl5vrTsf0PhBcWYlH5wl2qME7xTfdolEUr4CzumCiti7ETiO-RDdHqWlasBOW5bWsZ4GSpPdU"
+ + "06YAX0TfwVBs48uU5RpCHfz1uhSzez-3elbtp9CmTOHLt5pzJodiovccO55BQKYLPtmJcs6S9YLEEogmpI4Cb1D26"
+ + "fYahDh51jEmaohPnW5pb1nQe2yPEtuIhtRzNjhFCOOMwY5DBzNsymK-Gj6eJLm7FSGHee4AHLU_XmZMe_6bcLAiOx"
+ + "6Zdl65Kdd0hLcpwVxyZMi27HnYjAdqRlV3wuCW2PkhAW14qZQLfiuHZDEwnPe2PBGSlFcCmkQvJvX-YLoA7Uyc2wf"
+ + "NX5RJm38STwfiJSkQaNDhHKTWKiLOsgY4Gze6uZoG7zOcFXFRyaA4cbMmI76uyBO7j-9uQUCtBYqYto8x_9CUJcxI"
+ + "VC5SPG_C1mk-WoDMew01f0qy-bNaCgmJ9TOQGd08FyuT1SaMpCC0gX6mHuOnEgkFw3veBIowMpp9XcM-yc42fmIOp"
+ + "FOdvQO6uE9p55Qc-uXvsDTTvT3A7EeFU8a_YoAIt9UgNYM6VTvoprLz7dBI_P6C-bdPPZCY2amm-dJNVZelT6TbJB"
+ + "H_Vxh0fzeiSUBersy_QzB0moc-vPWgnB-IkgnYLV-4L3K0L2"
+ ),
},
- "ResponseMetadata": {
- "RequestId": "01234567-89ab-cdef-0123-456789abcdef"
- }
- }, 'someCall')
+ "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
+ },
+ "someCall",
+ )
def _make_botocore_exception(self):
- return botocore.exceptions.EndpointConnectionError(endpoint_url='junk.endpoint')
+ return botocore.exceptions.EndpointConnectionError(endpoint_url="junk.endpoint")
###
# Test that is_boto3_error_code does what's expected when used in a try/except block
@@ -87,7 +84,7 @@ class TestIsBoto3ErrorCode():
def test_is_boto3_error_code_single__raise__client(self):
# 'AccessDenied' error, should be caught in our try/except in _do_try_code
thrown_exception = self._make_denied_exception()
- codes_to_catch = 'AccessDenied'
+ codes_to_catch = "AccessDenied"
caught_exception = self._do_try_code(thrown_exception, codes_to_catch)
assert caught_exception == thrown_exception
@@ -95,7 +92,7 @@ class TestIsBoto3ErrorCode():
def test_is_boto3_error_code_single__raise__unexpected(self):
# 'SomeThingWentWrong' error, shouldn't be caught because the Code doesn't match
thrown_exception = self._make_unexpected_exception()
- codes_to_catch = 'AccessDenied'
+ codes_to_catch = "AccessDenied"
with pytest.raises(botocore.exceptions.ClientError) as context:
self._do_try_code(thrown_exception, codes_to_catch)
@@ -105,7 +102,7 @@ class TestIsBoto3ErrorCode():
# BotoCoreExceptions don't have an error code, so shouldn't be caught (and shouldn't throw
# some other error due to the missing 'Code' data on the exception)
thrown_exception = self._make_botocore_exception()
- codes_to_catch = 'AccessDenied'
+ codes_to_catch = "AccessDenied"
with pytest.raises(botocore.exceptions.BotoCoreError) as context:
self._do_try_code(thrown_exception, codes_to_catch)
@@ -116,13 +113,13 @@ class TestIsBoto3ErrorCode():
# 'AccessDenied' error, should be caught in our try/except in _do_try_code
# test with multiple possible codes to catch
thrown_exception = self._make_denied_exception()
- codes_to_catch = ['AccessDenied', 'NotAccessDenied']
+ codes_to_catch = ["AccessDenied", "NotAccessDenied"]
caught_exception = self._do_try_code(thrown_exception, codes_to_catch)
assert caught_exception == thrown_exception
thrown_exception = self._make_denied_exception()
- codes_to_catch = ['NotAccessDenied', 'AccessDenied']
+ codes_to_catch = ["NotAccessDenied", "AccessDenied"]
caught_exception = self._do_try_code(thrown_exception, codes_to_catch)
assert caught_exception == thrown_exception
@@ -131,7 +128,7 @@ class TestIsBoto3ErrorCode():
# 'SomeThingWentWrong' error, shouldn't be caught because the Code doesn't match
# test with multiple possible codes to catch
thrown_exception = self._make_unexpected_exception()
- codes_to_catch = ['NotAccessDenied', 'AccessDenied']
+ codes_to_catch = ["NotAccessDenied", "AccessDenied"]
with pytest.raises(botocore.exceptions.ClientError) as context:
self._do_try_code(thrown_exception, codes_to_catch)
@@ -142,7 +139,7 @@ class TestIsBoto3ErrorCode():
# some other error due to the missing 'Code' data on the exception)
# test with multiple possible codes to catch
thrown_exception = self._make_botocore_exception()
- codes_to_catch = ['NotAccessDenied', 'AccessDenied']
+ codes_to_catch = ["NotAccessDenied", "AccessDenied"]
with pytest.raises(botocore.exceptions.BotoCoreError) as context:
self._do_try_code(thrown_exception, codes_to_catch)
@@ -154,7 +151,7 @@ class TestIsBoto3ErrorCode():
def test_is_boto3_error_code_single__pass__client(self):
passed_exception = self._make_denied_exception()
- returned_exception = is_boto3_error_code('AccessDenied', e=passed_exception)
+ returned_exception = is_boto3_error_code("AccessDenied", e=passed_exception)
assert isinstance(passed_exception, returned_exception)
assert issubclass(returned_exception, botocore.exceptions.ClientError)
assert not issubclass(returned_exception, botocore.exceptions.BotoCoreError)
@@ -163,7 +160,7 @@ class TestIsBoto3ErrorCode():
def test_is_boto3_error_code_single__pass__unexpected(self):
passed_exception = self._make_unexpected_exception()
- returned_exception = is_boto3_error_code('AccessDenied', e=passed_exception)
+ returned_exception = is_boto3_error_code("AccessDenied", e=passed_exception)
assert not isinstance(passed_exception, returned_exception)
assert not issubclass(returned_exception, botocore.exceptions.ClientError)
assert not issubclass(returned_exception, botocore.exceptions.BotoCoreError)
@@ -172,7 +169,7 @@ class TestIsBoto3ErrorCode():
def test_is_boto3_error_code_single__pass__botocore(self):
passed_exception = self._make_botocore_exception()
- returned_exception = is_boto3_error_code('AccessDenied', e=passed_exception)
+ returned_exception = is_boto3_error_code("AccessDenied", e=passed_exception)
assert not isinstance(passed_exception, returned_exception)
assert not issubclass(returned_exception, botocore.exceptions.ClientError)
assert not issubclass(returned_exception, botocore.exceptions.BotoCoreError)
@@ -181,14 +178,14 @@ class TestIsBoto3ErrorCode():
def test_is_boto3_error_code_multiple__pass__client(self):
passed_exception = self._make_denied_exception()
- returned_exception = is_boto3_error_code(['NotAccessDenied', 'AccessDenied'], e=passed_exception)
+ returned_exception = is_boto3_error_code(["NotAccessDenied", "AccessDenied"], e=passed_exception)
assert isinstance(passed_exception, returned_exception)
assert issubclass(returned_exception, botocore.exceptions.ClientError)
assert not issubclass(returned_exception, botocore.exceptions.BotoCoreError)
assert issubclass(returned_exception, Exception)
assert returned_exception.__name__ != "NeverEverRaisedException"
- returned_exception = is_boto3_error_code(['AccessDenied', 'NotAccessDenied'], e=passed_exception)
+ returned_exception = is_boto3_error_code(["AccessDenied", "NotAccessDenied"], e=passed_exception)
assert isinstance(passed_exception, returned_exception)
assert issubclass(returned_exception, botocore.exceptions.ClientError)
assert not issubclass(returned_exception, botocore.exceptions.BotoCoreError)
@@ -197,7 +194,7 @@ class TestIsBoto3ErrorCode():
def test_is_boto3_error_code_multiple__pass__unexpected(self):
passed_exception = self._make_unexpected_exception()
- returned_exception = is_boto3_error_code(['NotAccessDenied', 'AccessDenied'], e=passed_exception)
+ returned_exception = is_boto3_error_code(["NotAccessDenied", "AccessDenied"], e=passed_exception)
assert not isinstance(passed_exception, returned_exception)
assert not issubclass(returned_exception, botocore.exceptions.ClientError)
assert not issubclass(returned_exception, botocore.exceptions.BotoCoreError)
@@ -206,7 +203,7 @@ class TestIsBoto3ErrorCode():
def test_is_boto3_error_code_multiple__pass__botocore(self):
passed_exception = self._make_botocore_exception()
- returned_exception = is_boto3_error_code(['NotAccessDenied', 'AccessDenied'], e=passed_exception)
+ returned_exception = is_boto3_error_code(["NotAccessDenied", "AccessDenied"], e=passed_exception)
assert not isinstance(passed_exception, returned_exception)
assert not issubclass(returned_exception, botocore.exceptions.ClientError)
assert not issubclass(returned_exception, botocore.exceptions.BotoCoreError)
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_is_boto3_error_message.py b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_is_boto3_error_message.py
index cd40a58dd..9cfc62d17 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_is_boto3_error_message.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_is_boto3_error_message.py
@@ -4,9 +4,6 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import pytest
try:
@@ -15,63 +12,63 @@ except ImportError:
# Handled by HAS_BOTO3
pass
-from ansible_collections.amazon.aws.plugins.module_utils.botocore import is_boto3_error_message
from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
+from ansible_collections.amazon.aws.plugins.module_utils.botocore import is_boto3_error_message
if not HAS_BOTO3:
pytestmark = pytest.mark.skip("test_is_boto3_error_message.py requires the python modules 'boto3' and 'botocore'")
-class TestIsBoto3ErrorMessaged():
-
+class TestIsBoto3ErrorMessaged:
def _make_denied_exception(self):
return botocore.exceptions.ClientError(
{
"Error": {
"Code": "AccessDenied",
- "Message": "User: arn:aws:iam::123456789012:user/ExampleUser "
- + "is not authorized to perform: iam:GetUser on resource: user ExampleUser"
+ "Message": (
+ "User: arn:aws:iam::123456789012:user/ExampleUser "
+ + "is not authorized to perform: iam:GetUser on resource: user ExampleUser"
+ ),
},
- "ResponseMetadata": {
- "RequestId": "01234567-89ab-cdef-0123-456789abcdef"
- }
- }, 'getUser')
+ "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
+ },
+ "getUser",
+ )
def _make_unexpected_exception(self):
return botocore.exceptions.ClientError(
{
- "Error": {
- "Code": "SomeThingWentWrong",
- "Message": "Boom!"
- },
- "ResponseMetadata": {
- "RequestId": "01234567-89ab-cdef-0123-456789abcdef"
- }
- }, 'someCall')
+ "Error": {"Code": "SomeThingWentWrong", "Message": "Boom!"},
+ "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
+ },
+ "someCall",
+ )
def _make_encoded_exception(self):
return botocore.exceptions.ClientError(
{
"Error": {
"Code": "AccessDenied",
- "Message": "You are not authorized to perform this operation. Encoded authorization failure message: " +
- "fEwXX6llx3cClm9J4pURgz1XPnJPrYexEbrJcLhFkwygMdOgx_-aEsj0LqRM6Kxt2HVI6prUhDwbJqBo9U2V7iRKZ" +
- "T6ZdJvHH02cXmD0Jwl5vrTsf0PhBcWYlH5wl2qME7xTfdolEUr4CzumCiti7ETiO-RDdHqWlasBOW5bWsZ4GSpPdU" +
- "06YAX0TfwVBs48uU5RpCHfz1uhSzez-3elbtp9CmTOHLt5pzJodiovccO55BQKYLPtmJcs6S9YLEEogmpI4Cb1D26" +
- "fYahDh51jEmaohPnW5pb1nQe2yPEtuIhtRzNjhFCOOMwY5DBzNsymK-Gj6eJLm7FSGHee4AHLU_XmZMe_6bcLAiOx" +
- "6Zdl65Kdd0hLcpwVxyZMi27HnYjAdqRlV3wuCW2PkhAW14qZQLfiuHZDEwnPe2PBGSlFcCmkQvJvX-YLoA7Uyc2wf" +
- "NX5RJm38STwfiJSkQaNDhHKTWKiLOsgY4Gze6uZoG7zOcFXFRyaA4cbMmI76uyBO7j-9uQUCtBYqYto8x_9CUJcxI" +
- "VC5SPG_C1mk-WoDMew01f0qy-bNaCgmJ9TOQGd08FyuT1SaMpCC0gX6mHuOnEgkFw3veBIowMpp9XcM-yc42fmIOp" +
- "FOdvQO6uE9p55Qc-uXvsDTTvT3A7EeFU8a_YoAIt9UgNYM6VTvoprLz7dBI_P6C-bdPPZCY2amm-dJNVZelT6TbJB" +
- "H_Vxh0fzeiSUBersy_QzB0moc-vPWgnB-IkgnYLV-4L3K0L2"
+ "Message": (
+ "You are not authorized to perform this operation. Encoded authorization failure message: "
+ + "fEwXX6llx3cClm9J4pURgz1XPnJPrYexEbrJcLhFkwygMdOgx_-aEsj0LqRM6Kxt2HVI6prUhDwbJqBo9U2V7iRKZ"
+ + "T6ZdJvHH02cXmD0Jwl5vrTsf0PhBcWYlH5wl2qME7xTfdolEUr4CzumCiti7ETiO-RDdHqWlasBOW5bWsZ4GSpPdU"
+ + "06YAX0TfwVBs48uU5RpCHfz1uhSzez-3elbtp9CmTOHLt5pzJodiovccO55BQKYLPtmJcs6S9YLEEogmpI4Cb1D26"
+ + "fYahDh51jEmaohPnW5pb1nQe2yPEtuIhtRzNjhFCOOMwY5DBzNsymK-Gj6eJLm7FSGHee4AHLU_XmZMe_6bcLAiOx"
+ + "6Zdl65Kdd0hLcpwVxyZMi27HnYjAdqRlV3wuCW2PkhAW14qZQLfiuHZDEwnPe2PBGSlFcCmkQvJvX-YLoA7Uyc2wf"
+ + "NX5RJm38STwfiJSkQaNDhHKTWKiLOsgY4Gze6uZoG7zOcFXFRyaA4cbMmI76uyBO7j-9uQUCtBYqYto8x_9CUJcxI"
+ + "VC5SPG_C1mk-WoDMew01f0qy-bNaCgmJ9TOQGd08FyuT1SaMpCC0gX6mHuOnEgkFw3veBIowMpp9XcM-yc42fmIOp"
+ + "FOdvQO6uE9p55Qc-uXvsDTTvT3A7EeFU8a_YoAIt9UgNYM6VTvoprLz7dBI_P6C-bdPPZCY2amm-dJNVZelT6TbJB"
+ + "H_Vxh0fzeiSUBersy_QzB0moc-vPWgnB-IkgnYLV-4L3K0L2"
+ ),
},
- "ResponseMetadata": {
- "RequestId": "01234567-89ab-cdef-0123-456789abcdef"
- }
- }, 'someCall')
+ "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
+ },
+ "someCall",
+ )
def _make_botocore_exception(self):
- return botocore.exceptions.EndpointConnectionError(endpoint_url='junk.endpoint')
+ return botocore.exceptions.EndpointConnectionError(endpoint_url="junk.endpoint")
def _do_try_message(self, exception, messages):
try:
@@ -87,7 +84,7 @@ class TestIsBoto3ErrorMessaged():
def test_is_boto3_error_message_single__raise__client(self):
# error with 'is not authorized to perform' in the message, should be caught in our try/except in _do_try_code
thrown_exception = self._make_denied_exception()
- messages_to_catch = 'is not authorized to perform'
+ messages_to_catch = "is not authorized to perform"
caught_exception = self._do_try_message(thrown_exception, messages_to_catch)
@@ -96,7 +93,7 @@ class TestIsBoto3ErrorMessaged():
def test_is_boto3_error_message_single__raise__unexpected(self):
# error with 'Boom!' as the message, shouldn't match and should fall through
thrown_exception = self._make_unexpected_exception()
- messages_to_catch = 'is not authorized to perform'
+ messages_to_catch = "is not authorized to perform"
with pytest.raises(botocore.exceptions.ClientError) as context:
self._do_try_message(thrown_exception, messages_to_catch)
@@ -106,7 +103,7 @@ class TestIsBoto3ErrorMessaged():
def test_is_boto3_error_message_single__raise__botocore(self):
# Test that we don't catch BotoCoreError
thrown_exception = self._make_botocore_exception()
- messages_to_catch = 'is not authorized to perform'
+ messages_to_catch = "is not authorized to perform"
with pytest.raises(botocore.exceptions.BotoCoreError) as context:
self._do_try_message(thrown_exception, messages_to_catch)
@@ -119,7 +116,7 @@ class TestIsBoto3ErrorMessaged():
def test_is_boto3_error_message_single__pass__client(self):
passed_exception = self._make_denied_exception()
- returned_exception = is_boto3_error_message('is not authorized to perform', e=passed_exception)
+ returned_exception = is_boto3_error_message("is not authorized to perform", e=passed_exception)
assert isinstance(passed_exception, returned_exception)
assert issubclass(returned_exception, botocore.exceptions.ClientError)
assert not issubclass(returned_exception, botocore.exceptions.BotoCoreError)
@@ -128,7 +125,7 @@ class TestIsBoto3ErrorMessaged():
def test_is_boto3_error_message_single__pass__unexpected(self):
passed_exception = self._make_unexpected_exception()
- returned_exception = is_boto3_error_message('is not authorized to perform', e=passed_exception)
+ returned_exception = is_boto3_error_message("is not authorized to perform", e=passed_exception)
assert not isinstance(passed_exception, returned_exception)
assert not issubclass(returned_exception, botocore.exceptions.ClientError)
assert not issubclass(returned_exception, botocore.exceptions.BotoCoreError)
@@ -137,7 +134,7 @@ class TestIsBoto3ErrorMessaged():
def test_is_boto3_error_message_single__pass__botocore(self):
passed_exception = self._make_botocore_exception()
- returned_exception = is_boto3_error_message('is not authorized to perform', e=passed_exception)
+ returned_exception = is_boto3_error_message("is not authorized to perform", e=passed_exception)
assert not isinstance(passed_exception, returned_exception)
assert not issubclass(returned_exception, botocore.exceptions.ClientError)
assert not issubclass(returned_exception, botocore.exceptions.BotoCoreError)
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_merge_botocore_config.py b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_merge_botocore_config.py
new file mode 100644
index 000000000..f5a8710cd
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_merge_botocore_config.py
@@ -0,0 +1,68 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import MagicMock
+
+import pytest
+
+try:
+ import botocore
+except ImportError:
+ # Handled by HAS_BOTO3
+ pass
+
+import ansible_collections.amazon.aws.plugins.module_utils.botocore as utils_botocore
+
+MINIMAL_CONFIG = {
+ "user_agent_extra": "Ansible/unit-test",
+}
+
+
+@pytest.fixture
+def basic_config():
+ config = botocore.config.Config(**MINIMAL_CONFIG)
+ return config
+
+
+def test_none_config(monkeypatch, basic_config):
+ original_options = basic_config._user_provided_options.copy()
+
+ monkeypatch.setattr(basic_config, "merge", MagicMock(name="merge"))
+ updated_config = utils_botocore._merge_botocore_config(basic_config, None)
+ assert not basic_config.merge.called
+ assert basic_config._user_provided_options == original_options
+ assert updated_config._user_provided_options == original_options
+
+
+def test_botocore_config(basic_config):
+ original_options = basic_config._user_provided_options.copy()
+ config_b = botocore.config.Config(parameter_validation=False)
+ updated_config = utils_botocore._merge_botocore_config(basic_config, config_b)
+
+ assert basic_config._user_provided_options == original_options
+ assert not updated_config._user_provided_options == original_options
+ assert updated_config._user_provided_options.get("parameter_validation") is False
+ assert updated_config._user_provided_options.get("user_agent_extra") == "Ansible/unit-test"
+
+ config_c = botocore.config.Config(user_agent_extra="Ansible/unit-test Updated")
+ updated_config = utils_botocore._merge_botocore_config(updated_config, config_c)
+ assert updated_config._user_provided_options.get("parameter_validation") is False
+ assert updated_config._user_provided_options.get("user_agent_extra") == "Ansible/unit-test Updated"
+
+
+def test_botocore_dict(basic_config):
+ original_options = basic_config._user_provided_options.copy()
+ config_b = dict(parameter_validation=False)
+ updated_config = utils_botocore._merge_botocore_config(basic_config, config_b)
+
+ assert basic_config._user_provided_options == original_options
+ assert not updated_config._user_provided_options == original_options
+ assert updated_config._user_provided_options.get("parameter_validation") is False
+ assert updated_config._user_provided_options.get("user_agent_extra") == "Ansible/unit-test"
+
+ config_c = dict(user_agent_extra="Ansible/unit-test Updated")
+ updated_config = utils_botocore._merge_botocore_config(updated_config, config_c)
+ assert updated_config._user_provided_options.get("parameter_validation") is False
+ assert updated_config._user_provided_options.get("user_agent_extra") == "Ansible/unit-test Updated"
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_normalize_boto3_result.py b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_normalize_boto3_result.py
index 71da9d66d..590203c06 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_normalize_boto3_result.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_normalize_boto3_result.py
@@ -1,59 +1,38 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import pytest
from ansible_collections.amazon.aws.plugins.module_utils.botocore import normalize_boto3_result
-example_date_txt = '2020-12-30T00:00:00.000Z'
-example_date_iso = '2020-12-30T00:00:00+00:00'
+example_date_txt = "2020-12-30T00:00:00.000Z"
+example_date_iso = "2020-12-30T00:00:00+00:00"
try:
from dateutil import parser as date_parser
+
example_date = date_parser.parse(example_date_txt)
except ImportError:
example_date = None
- pytestmark = pytest.mark.skip("test_normalize_boto3_result.py requires the python module dateutil (python-dateutil)")
+ pytestmark = pytest.mark.skip(
+ "test_normalize_boto3_result.py requires the python module dateutil (python-dateutil)"
+ )
normalize_boto3_result_data = [
- (dict(),
- dict()
- ),
+ (dict(), dict()),
# Bool
- (dict(param1=False),
- dict(param1=False)
- ),
+ (dict(param1=False), dict(param1=False)),
# Simple string (shouldn't be touched
- (dict(date_example=example_date_txt),
- dict(date_example=example_date_txt)
- ),
- (dict(date_example=example_date_iso),
- dict(date_example=example_date_iso)
- ),
+ (dict(date_example=example_date_txt), dict(date_example=example_date_txt)),
+ (dict(date_example=example_date_iso), dict(date_example=example_date_iso)),
# Datetime -> String
- (dict(date_example=example_date),
- dict(date_example=example_date_iso)
- ),
- (list(),
- list()
- ),
- (list([False]),
- list([False])
- ),
- (list([example_date_txt]),
- list([example_date_txt])
- ),
- (list([example_date_iso]),
- list([example_date_iso])
- ),
- (list([example_date]),
- list([example_date_iso])
- ),
+ (dict(date_example=example_date), dict(date_example=example_date_iso)),
+ (list(), list()),
+ (list([False]), list([False])),
+ (list([example_date_txt]), list([example_date_txt])),
+ (list([example_date_iso]), list([example_date_iso])),
+ (list([example_date]), list([example_date_iso])),
]
@pytest.mark.parametrize("input_params, output_params", normalize_boto3_result_data)
def test_normalize_boto3_result(input_params, output_params):
-
assert normalize_boto3_result(input_params) == output_params
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_sdk_versions.py b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_sdk_versions.py
new file mode 100644
index 000000000..7e2877b6b
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/botocore/test_sdk_versions.py
@@ -0,0 +1,250 @@
+# (c) 2021 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+import warnings
+from unittest.mock import sentinel
+
+import pytest
+
+try:
+ import boto3
+ import botocore
+except ImportError:
+ # Handled by HAS_BOTO3
+ pass
+
+from ansible_collections.amazon.aws.plugins.module_utils import botocore as botocore_utils
+from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
+from ansible_collections.amazon.aws.plugins.module_utils.botocore import boto3_at_least
+from ansible_collections.amazon.aws.plugins.module_utils.botocore import botocore_at_least
+from ansible_collections.amazon.aws.plugins.module_utils.exceptions import AnsibleBotocoreError
+
+DUMMY_VERSION = "5.5.5.5"
+
+TEST_VERSIONS = [
+ ["1.1.1", "2.2.2", True],
+ ["1.1.1", "0.0.1", False],
+ ["9.9.9", "9.9.9", True],
+ ["9.9.9", "9.9.10", True],
+ ["9.9.9", "9.10.9", True],
+ ["9.9.9", "10.9.9", True],
+ ["9.9.9", "9.9.8", False],
+ ["9.9.9", "9.8.9", False],
+ ["9.9.9", "8.9.9", False],
+ ["10.10.10", "10.10.10", True],
+ ["10.10.10", "10.10.11", True],
+ ["10.10.10", "10.11.10", True],
+ ["10.10.10", "11.10.10", True],
+ ["10.10.10", "10.10.9", False],
+ ["10.10.10", "10.9.10", False],
+ ["10.10.10", "9.19.10", False],
+]
+
+if not HAS_BOTO3:
+ pytest.mark.skip(
+ "test_require_at_least.py requires the python modules 'boto3' and 'botocore'", allow_module_level=True
+ )
+
+
+# ========================================================
+# Test gather_sdk_versions
+# ========================================================
+def test_gather_sdk_versions_missing_botocore(monkeypatch):
+ monkeypatch.setattr(botocore_utils, "HAS_BOTO3", False)
+ sdk_versions = botocore_utils.gather_sdk_versions()
+ assert isinstance(sdk_versions, dict)
+ assert sdk_versions == {}
+
+
+def test_gather_sdk_versions(monkeypatch):
+ monkeypatch.setattr(botocore_utils, "HAS_BOTO3", True)
+ monkeypatch.setattr(botocore, "__version__", sentinel.BOTOCORE_VERSION)
+ monkeypatch.setattr(boto3, "__version__", sentinel.BOTO3_VERSION)
+
+ sdk_versions = botocore_utils.gather_sdk_versions()
+ assert isinstance(sdk_versions, dict)
+ assert len(sdk_versions) == 2
+ assert "boto3_version" in sdk_versions
+ assert "botocore_version" in sdk_versions
+ assert sdk_versions["boto3_version"] is sentinel.BOTO3_VERSION
+ assert sdk_versions["botocore_version"] is sentinel.BOTOCORE_VERSION
+
+
+# ========================================================
+# Test botocore_at_least
+# ========================================================
+@pytest.mark.parametrize("desired_version, compare_version, at_least", TEST_VERSIONS)
+def test_botocore_at_least(monkeypatch, desired_version, compare_version, at_least):
+ monkeypatch.setattr(botocore, "__version__", compare_version)
+ # Set boto3 version to a known value (tests are on both sides) to make
+ # sure we're comparing the right library
+ monkeypatch.setattr(boto3, "__version__", DUMMY_VERSION)
+
+ assert at_least == botocore_at_least(desired_version)
+
+
+# ========================================================
+# Test boto3_at_least
+# ========================================================
+@pytest.mark.parametrize("desired_version, compare_version, at_least", TEST_VERSIONS)
+def test_boto3_at_least(monkeypatch, desired_version, compare_version, at_least):
+ # Set botocore version to a known value (tests are on both sides) to make
+ # sure we're comparing the right library
+ monkeypatch.setattr(botocore, "__version__", DUMMY_VERSION)
+ monkeypatch.setattr(boto3, "__version__", compare_version)
+
+ assert at_least == boto3_at_least(desired_version)
+
+
+# ========================================================
+# Test check_sdk_version_supported
+# ========================================================
+def test_check_sdk_missing_botocore(monkeypatch):
+ monkeypatch.setattr(botocore_utils, "HAS_BOTO3", False)
+
+ with pytest.raises(AnsibleBotocoreError) as exception:
+ botocore_utils.check_sdk_version_supported()
+
+ assert "botocore and boto3" in exception.value.message
+
+ with warnings.catch_warnings():
+ # We should be erroring out before we get as far as testing versions
+ # so fail if a warning is emitted
+ warnings.simplefilter("error")
+ with pytest.raises(AnsibleBotocoreError) as exception:
+ botocore_utils.check_sdk_version_supported(warn=warnings.warn)
+
+ assert "botocore and boto3" in exception.value.message
+
+
+def test_check_sdk_all_good(monkeypatch):
+ monkeypatch.setattr(botocore_utils, "MINIMUM_BOTOCORE_VERSION", "6.6.6")
+ monkeypatch.setattr(botocore_utils, "MINIMUM_BOTO3_VERSION", "6.6.6")
+ monkeypatch.setattr(boto3, "__version__", "6.6.6")
+ monkeypatch.setattr(botocore, "__version__", "6.6.6")
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ supported = botocore_utils.check_sdk_version_supported()
+
+ assert supported is True
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ supported = botocore_utils.check_sdk_version_supported(warn=warnings.warn)
+
+ assert supported is True
+
+
+def test_check_sdk_all_good_override(monkeypatch):
+ monkeypatch.setattr(botocore_utils, "MINIMUM_BOTOCORE_VERSION", "6.6.6")
+ monkeypatch.setattr(botocore_utils, "MINIMUM_BOTO3_VERSION", "6.6.6")
+ monkeypatch.setattr(boto3, "__version__", "5.5.5")
+ monkeypatch.setattr(botocore, "__version__", "5.5.5")
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ supported = botocore_utils.check_sdk_version_supported(
+ botocore_version="5.5.5",
+ boto3_version="5.5.5",
+ )
+
+ assert supported is True
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ supported = botocore_utils.check_sdk_version_supported(
+ botocore_version="5.5.5",
+ boto3_version="5.5.5",
+ warn=warnings.warn,
+ )
+
+ assert supported is True
+
+
+@pytest.mark.parametrize("desired_version, compare_version, at_least", TEST_VERSIONS)
+def test_check_sdk_botocore(monkeypatch, desired_version, compare_version, at_least):
+ monkeypatch.setattr(botocore_utils, "MINIMUM_BOTOCORE_VERSION", desired_version)
+ monkeypatch.setattr(botocore, "__version__", compare_version)
+ monkeypatch.setattr(botocore_utils, "MINIMUM_BOTO3_VERSION", DUMMY_VERSION)
+ monkeypatch.setattr(boto3, "__version__", DUMMY_VERSION)
+
+ # Without warn being passed we should just return False
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ supported = botocore_utils.check_sdk_version_supported()
+
+ assert supported is at_least
+
+ if supported:
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ supported = botocore_utils.check_sdk_version_supported(warn=warnings.warn)
+ else:
+ with pytest.warns(UserWarning, match="botocore") as recorded_warnings:
+ supported = botocore_utils.check_sdk_version_supported(warn=warnings.warn)
+ assert len(recorded_warnings) == 1
+ w = recorded_warnings.pop(UserWarning)
+ assert "boto3" not in str(w.message)
+
+ assert supported is at_least
+
+
+@pytest.mark.parametrize("desired_version, compare_version, at_least", TEST_VERSIONS)
+def test_check_sdk_boto3(monkeypatch, desired_version, compare_version, at_least):
+ monkeypatch.setattr(botocore_utils, "MINIMUM_BOTO3_VERSION", desired_version)
+ monkeypatch.setattr(boto3, "__version__", compare_version)
+ monkeypatch.setattr(botocore_utils, "MINIMUM_BOTOCORE_VERSION", DUMMY_VERSION)
+ monkeypatch.setattr(botocore, "__version__", DUMMY_VERSION)
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ supported = botocore_utils.check_sdk_version_supported()
+
+ assert supported is at_least
+
+ if supported:
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ supported = botocore_utils.check_sdk_version_supported(warn=warnings.warn)
+ else:
+ with pytest.warns(UserWarning, match="boto3") as recorded_warnings:
+ supported = botocore_utils.check_sdk_version_supported(warn=warnings.warn)
+ assert len(recorded_warnings) == 1
+ w = recorded_warnings.pop(UserWarning)
+ assert "boto3" in str(w.message)
+
+ assert supported is at_least
+
+
+@pytest.mark.parametrize("desired_version, compare_version, at_least", TEST_VERSIONS)
+def test_check_sdk_both(monkeypatch, desired_version, compare_version, at_least):
+ monkeypatch.setattr(botocore_utils, "MINIMUM_BOTO3_VERSION", desired_version)
+ monkeypatch.setattr(boto3, "__version__", compare_version)
+ monkeypatch.setattr(botocore_utils, "MINIMUM_BOTOCORE_VERSION", desired_version)
+ monkeypatch.setattr(botocore, "__version__", compare_version)
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ supported = botocore_utils.check_sdk_version_supported()
+ assert supported is at_least
+
+ if supported:
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ supported = botocore_utils.check_sdk_version_supported(warn=warnings.warn)
+ else:
+ message_map = dict()
+ with pytest.warns(UserWarning) as recorded_warnings:
+ supported = botocore_utils.check_sdk_version_supported(warn=warnings.warn)
+ assert len(recorded_warnings) == 2
+ for w in recorded_warnings:
+ if "boto3" in str(w.message):
+ message_map["boto3"] = str(w.message)
+ elif "botocore" in str(w.message):
+ message_map["botocore"] = str(w.message)
+ assert "boto3" in message_map
+ assert "botocore" in message_map
+ assert supported is at_least
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/__init__.py b/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_backoff_iterator.py b/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_backoff_iterator.py
index 5fee115c2..5572f406e 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_backoff_iterator.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_backoff_iterator.py
@@ -3,9 +3,6 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
from ansible_collections.amazon.aws.plugins.module_utils.cloud import BackoffIterator
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_cloud_retry.py b/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_cloud_retry.py
index ce5f03f11..06119d7f6 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_cloud_retry.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_cloud_retry.py
@@ -3,18 +3,15 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import random
from datetime import datetime
+
import pytest
from ansible_collections.amazon.aws.plugins.module_utils.cloud import CloudRetry
-class TestCloudRetry():
-
+class TestCloudRetry:
error_codes = [400, 500, 600]
custom_error_codes = [100, 200, 300]
@@ -22,11 +19,12 @@ class TestCloudRetry():
"""
custom exception class for testing
"""
+
def __init__(self, status):
self.status = status
def __str__(self):
- return "TestException with status: {0}".format(self.status)
+ return f"TestException with status: {self.status}"
class UnitTestsRetry(CloudRetry):
base_class = Exception
@@ -40,7 +38,7 @@ class TestCloudRetry():
@staticmethod
def status_code_from_exception(error):
- return error.status['response']['status']
+ return error.status["response"]["status"]
@staticmethod
def found(response_code, catch_extra_error_codes=None):
@@ -72,30 +70,12 @@ class TestCloudRetry():
return True
# ========================================================
- # retry original backoff
- # ========================================================
- def test_retry_backoff(self):
-
- @TestCloudRetry.UnitTestsRetry.backoff(tries=3, delay=1, backoff=1.1,
- catch_extra_error_codes=TestCloudRetry.error_codes)
- def test_retry_func():
- if test_retry_func.counter < 2:
- test_retry_func.counter += 1
- raise self.OurTestException(status=random.choice(TestCloudRetry.error_codes))
- else:
- return True
-
- test_retry_func.counter = 0
- ret = test_retry_func()
- assert ret is True
-
- # ========================================================
# retry exponential backoff
# ========================================================
def test_retry_exponential_backoff(self):
-
- @TestCloudRetry.UnitTestsRetry.exponential_backoff(retries=3, delay=1, backoff=1.1, max_delay=3,
- catch_extra_error_codes=TestCloudRetry.error_codes)
+ @TestCloudRetry.UnitTestsRetry.exponential_backoff(
+ retries=3, delay=1, backoff=1.1, max_delay=3, catch_extra_error_codes=TestCloudRetry.error_codes
+ )
def test_retry_func():
if test_retry_func.counter < 2:
test_retry_func.counter += 1
@@ -110,8 +90,9 @@ class TestCloudRetry():
def test_retry_exponential_backoff_with_unexpected_exception(self):
unexpected_except = self.OurTestException(status=100)
- @TestCloudRetry.UnitTestsRetry.exponential_backoff(retries=3, delay=1, backoff=1.1, max_delay=3,
- catch_extra_error_codes=TestCloudRetry.error_codes)
+ @TestCloudRetry.UnitTestsRetry.exponential_backoff(
+ retries=3, delay=1, backoff=1.1, max_delay=3, catch_extra_error_codes=TestCloudRetry.error_codes
+ )
def test_retry_func():
if test_retry_func.counter == 0:
test_retry_func.counter += 1
@@ -129,8 +110,9 @@ class TestCloudRetry():
# retry jittered backoff
# ========================================================
def test_retry_jitter_backoff(self):
- @TestCloudRetry.UnitTestsRetry.jittered_backoff(retries=3, delay=1, max_delay=3,
- catch_extra_error_codes=TestCloudRetry.error_codes)
+ @TestCloudRetry.UnitTestsRetry.jittered_backoff(
+ retries=3, delay=1, max_delay=3, catch_extra_error_codes=TestCloudRetry.error_codes
+ )
def test_retry_func():
if test_retry_func.counter < 2:
test_retry_func.counter += 1
@@ -145,8 +127,9 @@ class TestCloudRetry():
def test_retry_jittered_backoff_with_unexpected_exception(self):
unexpected_except = self.OurTestException(status=100)
- @TestCloudRetry.UnitTestsRetry.jittered_backoff(retries=3, delay=1, max_delay=3,
- catch_extra_error_codes=TestCloudRetry.error_codes)
+ @TestCloudRetry.UnitTestsRetry.jittered_backoff(
+ retries=3, delay=1, max_delay=3, catch_extra_error_codes=TestCloudRetry.error_codes
+ )
def test_retry_func():
if test_retry_func.counter == 0:
test_retry_func.counter += 1
@@ -167,8 +150,9 @@ class TestCloudRetry():
def build_response():
return dict(response=dict(status=random.choice(TestCloudRetry.custom_error_codes)))
- @self.CustomRetry.exponential_backoff(retries=3, delay=1, backoff=1.1, max_delay=3,
- catch_extra_error_codes=TestCloudRetry.error_codes)
+ @self.CustomRetry.exponential_backoff(
+ retries=3, delay=1, backoff=1.1, max_delay=3, catch_extra_error_codes=TestCloudRetry.error_codes
+ )
def test_retry_func():
if test_retry_func.counter < 2:
test_retry_func.counter += 1
@@ -185,8 +169,9 @@ class TestCloudRetry():
# Test wrapped function multiple times will restart the sleep
# =============================================================
def test_wrapped_function_called_several_times(self):
- @TestCloudRetry.UnitTestsRetry.exponential_backoff(retries=2, delay=2, backoff=4, max_delay=100,
- catch_extra_error_codes=TestCloudRetry.error_codes)
+ @TestCloudRetry.UnitTestsRetry.exponential_backoff(
+ retries=2, delay=2, backoff=4, max_delay=100, catch_extra_error_codes=TestCloudRetry.error_codes
+ )
def _fail():
raise self.OurTestException(status=random.choice(TestCloudRetry.error_codes))
@@ -206,13 +191,15 @@ class TestCloudRetry():
def _fail_key():
my_dict = dict()
- return my_dict['invalid_key']
+ return my_dict["invalid_key"]
def _fail_exception():
- raise Exception('bang')
+ raise Exception("bang")
key_retry_decorator = TestCloudRetry.KeyRetry.exponential_backoff(retries=2, delay=2, backoff=4, max_delay=100)
- key_and_index_retry_decorator = TestCloudRetry.KeyAndIndexRetry.exponential_backoff(retries=2, delay=2, backoff=4, max_delay=100)
+ key_and_index_retry_decorator = TestCloudRetry.KeyAndIndexRetry.exponential_backoff(
+ retries=2, delay=2, backoff=4, max_delay=100
+ )
expectations = [
[key_retry_decorator, _fail_exception, 0, Exception],
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_decorator_generation.py b/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_decorator_generation.py
index 23b446763..ad3890503 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_decorator_generation.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_decorator_generation.py
@@ -3,19 +3,19 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+import sys
+from unittest.mock import MagicMock
+from unittest.mock import sentinel
import pytest
-import sys
-from ansible_collections.amazon.aws.plugins.module_utils.cloud import CloudRetry
from ansible_collections.amazon.aws.plugins.module_utils.cloud import BackoffIterator
-from ansible_collections.amazon.aws.tests.unit.compat.mock import MagicMock
-from ansible_collections.amazon.aws.tests.unit.compat.mock import sentinel
+from ansible_collections.amazon.aws.plugins.module_utils.cloud import CloudRetry
if sys.version_info < (3, 8):
- pytest.skip("accessing call_args.kwargs by keyword (instead of index) was introduced in Python 3.8", allow_module_level=True)
+ pytest.skip(
+ "accessing call_args.kwargs by keyword (instead of index) was introduced in Python 3.8", allow_module_level=True
+ )
@pytest.fixture
@@ -26,10 +26,11 @@ def patch_cloud_retry(monkeypatch):
Note: this doesn't test the operation of CloudRetry.base_decorator itself, but does make sure
we can fully exercise the various wrapper functions built over the top of it.
"""
+
def perform_patch():
decorator_generator = MagicMock()
decorator_generator.return_value = sentinel.decorator
- monkeypatch.setattr(CloudRetry, 'base_decorator', decorator_generator)
+ monkeypatch.setattr(CloudRetry, "base_decorator", decorator_generator)
return CloudRetry, decorator_generator
return perform_patch
@@ -49,10 +50,10 @@ def check_common_side_effects(decorator_generator):
assert decorator_generator.call_count == 1
gen_kw_args = decorator_generator.call_args.kwargs
- assert gen_kw_args['found'] is CloudRetry.found
- assert gen_kw_args['status_code_from_exception'] is CloudRetry.status_code_from_exception
+ assert gen_kw_args["found"] is CloudRetry.found
+ assert gen_kw_args["status_code_from_exception"] is CloudRetry.status_code_from_exception
- sleep_time_generator = gen_kw_args['sleep_time_generator']
+ sleep_time_generator = gen_kw_args["sleep_time_generator"]
assert isinstance(sleep_time_generator, BackoffIterator)
# Return the KW args used when CloudRetry.base_decorator was called and the sleep_time_generator
@@ -69,8 +70,8 @@ def test_create_exponential_backoff_with_defaults(patch_cloud_retry):
gen_kw_args, sleep_time_generator = check_common_side_effects(decorator_generator)
- assert gen_kw_args['retries'] == 10
- assert gen_kw_args['catch_extra_error_codes'] is None
+ assert gen_kw_args["retries"] == 10
+ assert gen_kw_args["catch_extra_error_codes"] is None
assert sleep_time_generator.delay == 3
assert sleep_time_generator.backoff == 2
assert sleep_time_generator.max_delay == 60
@@ -80,13 +81,15 @@ def test_create_exponential_backoff_with_defaults(patch_cloud_retry):
def test_create_exponential_backoff_with_args(patch_cloud_retry):
cloud_retry, decorator_generator = patch_cloud_retry()
- decorator = cloud_retry.exponential_backoff(retries=11, delay=4, backoff=3, max_delay=61, catch_extra_error_codes=[42])
+ decorator = cloud_retry.exponential_backoff(
+ retries=11, delay=4, backoff=3, max_delay=61, catch_extra_error_codes=[42]
+ )
assert decorator is sentinel.decorator
gen_kw_args, sleep_time_generator = check_common_side_effects(decorator_generator)
- assert gen_kw_args['catch_extra_error_codes'] == [42]
- assert gen_kw_args['retries'] == 11
+ assert gen_kw_args["catch_extra_error_codes"] == [42]
+ assert gen_kw_args["retries"] == 11
assert sleep_time_generator.delay == 4
assert sleep_time_generator.backoff == 3
assert sleep_time_generator.max_delay == 61
@@ -101,8 +104,8 @@ def test_create_jittered_backoff_with_defaults(patch_cloud_retry):
gen_kw_args, sleep_time_generator = check_common_side_effects(decorator_generator)
- assert gen_kw_args['catch_extra_error_codes'] is None
- assert gen_kw_args['retries'] == 10
+ assert gen_kw_args["catch_extra_error_codes"] is None
+ assert gen_kw_args["retries"] == 10
assert sleep_time_generator.delay == 3
assert sleep_time_generator.backoff == 2
assert sleep_time_generator.max_delay == 60
@@ -117,40 +120,9 @@ def test_create_jittered_backoff_with_args(patch_cloud_retry):
gen_kw_args, sleep_time_generator = check_common_side_effects(decorator_generator)
- assert gen_kw_args['catch_extra_error_codes'] == [42]
- assert gen_kw_args['retries'] == 11
+ assert gen_kw_args["catch_extra_error_codes"] == [42]
+ assert gen_kw_args["retries"] == 11
assert sleep_time_generator.delay == 4
assert sleep_time_generator.backoff == 3
assert sleep_time_generator.max_delay == 61
assert sleep_time_generator.jitter is True
-
-
-def test_create_legacy_backoff_with_defaults(patch_cloud_retry):
- cloud_retry, decorator_generator = patch_cloud_retry()
-
- decorator = cloud_retry.backoff()
-
- gen_kw_args, sleep_time_generator = check_common_side_effects(decorator_generator)
-
- assert gen_kw_args['catch_extra_error_codes'] is None
- assert gen_kw_args['retries'] == 10
- assert sleep_time_generator.delay == 3
- assert sleep_time_generator.backoff == 1.1
- assert sleep_time_generator.max_delay is None
- assert sleep_time_generator.jitter is False
-
-
-def test_create_legacy_backoff_with_args(patch_cloud_retry):
- cloud_retry, decorator_generator = patch_cloud_retry()
-
- # Note: the Keyword Args have different names here, and not all of them can be passed...
- decorator = cloud_retry.backoff(tries=11, delay=4, backoff=3, catch_extra_error_codes=[42])
-
- gen_kw_args, sleep_time_generator = check_common_side_effects(decorator_generator)
-
- assert gen_kw_args['catch_extra_error_codes'] == [42]
- assert gen_kw_args['retries'] == 11
- assert sleep_time_generator.delay == 4
- assert sleep_time_generator.backoff == 3
- assert sleep_time_generator.max_delay is None
- assert sleep_time_generator.jitter is False
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_retries_found.py b/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_retries_found.py
index 21ad74d42..00e84c65d 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_retries_found.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_retries_found.py
@@ -3,32 +3,29 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
from ansible_collections.amazon.aws.plugins.module_utils.cloud import CloudRetry
def test_found_not_itterable():
- assert CloudRetry.found('404', 5) is False
- assert CloudRetry.found('404', None) is False
- assert CloudRetry.found('404', 404) is False
+ assert CloudRetry.found("404", 5) is False
+ assert CloudRetry.found("404", None) is False
+ assert CloudRetry.found("404", 404) is False
# This seems counter intuitive, but the second argument is supposed to be iterable...
assert CloudRetry.found(404, 404) is False
def test_found_no_match():
- assert CloudRetry.found('404', ['403']) is False
- assert CloudRetry.found('404', ['500', '403']) is False
- assert CloudRetry.found('404', {'403'}) is False
- assert CloudRetry.found('404', {'500', '403'}) is False
+ assert CloudRetry.found("404", ["403"]) is False
+ assert CloudRetry.found("404", ["500", "403"]) is False
+ assert CloudRetry.found("404", {"403"}) is False
+ assert CloudRetry.found("404", {"500", "403"}) is False
def test_found_match():
- assert CloudRetry.found('404', ['404']) is True
- assert CloudRetry.found('404', ['403', '404']) is True
- assert CloudRetry.found('404', ['404', '403']) is True
- assert CloudRetry.found('404', {'404'}) is True
- assert CloudRetry.found('404', {'403', '404'}) is True
+ assert CloudRetry.found("404", ["404"]) is True
+ assert CloudRetry.found("404", ["403", "404"]) is True
+ assert CloudRetry.found("404", ["404", "403"]) is True
+ assert CloudRetry.found("404", {"404"}) is True
+ assert CloudRetry.found("404", {"403", "404"}) is True
# Beware, this will generally only work with strings (they're iterable)
- assert CloudRetry.found('404', '404') is True
+ assert CloudRetry.found("404", "404") is True
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_retry_func.py b/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_retry_func.py
index 609c0718b..c318f6186 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_retry_func.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/cloud/test_retry_func.py
@@ -3,18 +3,18 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+import sys
+from unittest.mock import Mock
+from unittest.mock import sentinel
import pytest
-import sys
import ansible_collections.amazon.aws.plugins.module_utils.cloud as cloud_utils
-from ansible_collections.amazon.aws.tests.unit.compat.mock import Mock
-from ansible_collections.amazon.aws.tests.unit.compat.mock import sentinel
if sys.version_info < (3, 8):
- pytest.skip("accessing call_args.kwargs by keyword (instead of index) was introduced in Python 3.8", allow_module_level=True)
+ pytest.skip(
+ "accessing call_args.kwargs by keyword (instead of index) was introduced in Python 3.8", allow_module_level=True
+ )
class ExceptionA(Exception):
@@ -98,9 +98,7 @@ def test_no_match_with_extra_error_codes(retrier):
catch_extra_error_codes = sentinel.extra_codes
with pytest.raises(ExceptionA):
- _f, _result = retrier(
- func=func, found_f=found_f, catch_extra_error_codes=catch_extra_error_codes
- )
+ _f, _result = retrier(func=func, found_f=found_f, catch_extra_error_codes=catch_extra_error_codes)
assert func.called is True
assert func.call_count == 1
assert found_f.called is True
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/conftest.py b/ansible_collections/amazon/aws/tests/unit/module_utils/conftest.py
index f90055615..397dfac84 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/conftest.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/conftest.py
@@ -1,21 +1,19 @@
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import json
import sys
-from io import BytesIO
import warnings
+from io import BytesIO
import pytest
import ansible.module_utils.basic
import ansible.module_utils.common
-from ansible.module_utils.six import PY3, string_types
from ansible.module_utils._text import to_bytes
from ansible.module_utils.common._collections_compat import MutableMapping
+from ansible.module_utils.six import PY3
+from ansible.module_utils.six import string_types
@pytest.fixture
@@ -23,7 +21,7 @@ def stdin(mocker, request):
old_args = ansible.module_utils.basic._ANSIBLE_ARGS
ansible.module_utils.basic._ANSIBLE_ARGS = None
old_argv = sys.argv
- sys.argv = ['ansible_unittest']
+ sys.argv = ["ansible_unittest"]
for var in ["_global_warnings", "_global_deprecations"]:
if hasattr(ansible.module_utils.common.warnings, var):
@@ -35,22 +33,22 @@ def stdin(mocker, request):
if isinstance(request.param, string_types):
args = request.param
elif isinstance(request.param, MutableMapping):
- if 'ANSIBLE_MODULE_ARGS' not in request.param:
- request.param = {'ANSIBLE_MODULE_ARGS': request.param}
- if '_ansible_remote_tmp' not in request.param['ANSIBLE_MODULE_ARGS']:
- request.param['ANSIBLE_MODULE_ARGS']['_ansible_remote_tmp'] = '/tmp'
- if '_ansible_keep_remote_files' not in request.param['ANSIBLE_MODULE_ARGS']:
- request.param['ANSIBLE_MODULE_ARGS']['_ansible_keep_remote_files'] = False
+ if "ANSIBLE_MODULE_ARGS" not in request.param:
+ request.param = {"ANSIBLE_MODULE_ARGS": request.param}
+ if "_ansible_remote_tmp" not in request.param["ANSIBLE_MODULE_ARGS"]:
+ request.param["ANSIBLE_MODULE_ARGS"]["_ansible_remote_tmp"] = "/tmp"
+ if "_ansible_keep_remote_files" not in request.param["ANSIBLE_MODULE_ARGS"]:
+ request.param["ANSIBLE_MODULE_ARGS"]["_ansible_keep_remote_files"] = False
args = json.dumps(request.param)
else:
- raise Exception('Malformed data to the stdin pytest fixture')
+ raise Exception("Malformed data to the stdin pytest fixture")
- fake_stdin = BytesIO(to_bytes(args, errors='surrogate_or_strict'))
+ fake_stdin = BytesIO(to_bytes(args, errors="surrogate_or_strict"))
if PY3:
- mocker.patch('ansible.module_utils.basic.sys.stdin', mocker.MagicMock())
- mocker.patch('ansible.module_utils.basic.sys.stdin.buffer', fake_stdin)
+ mocker.patch("ansible.module_utils.basic.sys.stdin", mocker.MagicMock())
+ mocker.patch("ansible.module_utils.basic.sys.stdin.buffer", fake_stdin)
else:
- mocker.patch('ansible.module_utils.basic.sys.stdin', fake_stdin)
+ mocker.patch("ansible.module_utils.basic.sys.stdin", fake_stdin)
yield fake_stdin
@@ -63,17 +61,17 @@ def am(stdin, request):
old_args = ansible.module_utils.basic._ANSIBLE_ARGS
ansible.module_utils.basic._ANSIBLE_ARGS = None
old_argv = sys.argv
- sys.argv = ['ansible_unittest']
+ sys.argv = ["ansible_unittest"]
argspec = {}
- if hasattr(request, 'param'):
+ if hasattr(request, "param"):
if isinstance(request.param, dict):
argspec = request.param
am = ansible.module_utils.basic.AnsibleModule(
argument_spec=argspec,
)
- am._name = 'ansible_unittest'
+ am._name = "ansible_unittest"
yield am
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/elbv2/__init__.py b/ansible_collections/amazon/aws/tests/unit/module_utils/elbv2/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/elbv2/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/elbv2/test_listener_rules.py b/ansible_collections/amazon/aws/tests/unit/module_utils/elbv2/test_listener_rules.py
new file mode 100644
index 000000000..2045bc79a
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/elbv2/test_listener_rules.py
@@ -0,0 +1,740 @@
+#
+# (c) 2024 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import MagicMock
+
+import pytest
+
+from ansible_collections.amazon.aws.plugins.module_utils import elbv2
+
+example_arn = "arn:aws:elasticloadbalancing:us-east-1:123456789012:targetgroup/nlb-123456789abc/abcdef0123456789"
+example_arn2 = "arn:aws:elasticloadbalancing:us-east-1:123456789012:targetgroup/nlb-0123456789ab/0123456789abcdef"
+
+
+test_rules = [
+ (
+ {
+ "Actions": [
+ {
+ "AuthenticateOidcConfig": {
+ "AuthorizationEndpoint": "https://samples.auth0.com/authorize",
+ "ClientId": "kbyuFDidLLm280LIwVFiazOqjO3ty8KH",
+ "Issuer": "https://samples.auth0.com",
+ "Scope": "openid",
+ "SessionTimeout": 604800,
+ "TokenEndpoint": "https://samples.auth0.com/oauth/token",
+ "UserInfoEndpoint": "https://samples.auth0.com/userinfo",
+ "OnUnauthenticatedRequest": "authenticate",
+ "SessionCookieName": "AWSELBAuthSessionCookie",
+ },
+ "Order": 1,
+ "Type": "authenticate-oidc",
+ }
+ ],
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Priority": 2,
+ },
+ {
+ "Actions": [
+ {
+ "AuthenticateOidcConfig": {
+ "AuthorizationEndpoint": "https://samples.auth0.com/authorize",
+ "ClientId": "kbyuFDidLLm280LIwVFiazOqjO3ty8KH",
+ "Issuer": "https://samples.auth0.com",
+ "Scope": "openid",
+ "SessionTimeout": 604800,
+ "TokenEndpoint": "https://samples.auth0.com/oauth/token",
+ "UseExistingClientSecret": True,
+ "UserInfoEndpoint": "https://samples.auth0.com/userinfo",
+ },
+ "Order": 1,
+ "Type": "authenticate-oidc",
+ }
+ ],
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Priority": 2,
+ },
+ {},
+ ),
+ (
+ {
+ "Actions": [
+ {
+ "AuthenticateOidcConfig": {
+ "AuthorizationEndpoint": "https://samples.auth0.com/authorize",
+ "ClientId": "kbyuFDidLLm280LIwVFiazOqjO3ty8KH",
+ "Issuer": "https://samples.auth0.com",
+ "Scope": "openid",
+ "SessionTimeout": 604800,
+ "TokenEndpoint": "https://samples.auth0.com/oauth/token",
+ "UserInfoEndpoint": "https://samples.auth0.com/userinfo",
+ "OnUnauthenticatedRequest": "authenticate",
+ "SessionCookieName": "AWSELBAuthSessionCookie",
+ },
+ "Order": 1,
+ "Type": "authenticate-oidc",
+ }
+ ],
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Priority": 2,
+ },
+ {
+ "Actions": [
+ {
+ "AuthenticateOidcConfig": {
+ "AuthorizationEndpoint": "https://samples.auth0.com/authorize",
+ "ClientId": "kbyuFDidLLm280LIwVFiazOqjO3ty8KH",
+ "Issuer": "https://samples.auth0.com",
+ "Scope": "openid",
+ "SessionTimeout": 604800,
+ "TokenEndpoint": "https://samples.auth0.com/oauth/token",
+ "UseExistingClientSecret": True,
+ "UserInfoEndpoint": "https://samples.auth0.com/userinfo",
+ "OnUnauthenticatedRequest": "authenticate",
+ },
+ "Order": 1,
+ "Type": "authenticate-oidc",
+ }
+ ],
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Priority": 2,
+ },
+ {},
+ ),
+ (
+ {
+ "Actions": [
+ {
+ "AuthenticateOidcConfig": {
+ "AuthorizationEndpoint": "https://samples.auth0.com/authorize",
+ "ClientId": "kbyuFDidLLm280LIwVFiazOqjO3ty8KH",
+ "Issuer": "https://samples.auth0.com",
+ "Scope": "openid",
+ "SessionTimeout": 604800,
+ "TokenEndpoint": "https://samples.auth0.com/oauth/token",
+ "UserInfoEndpoint": "https://samples.auth0.com/userinfo",
+ "OnUnauthenticatedRequest": "authenticate",
+ "SessionCookieName": "AWSELBAuthSessionCookie",
+ },
+ "Order": 1,
+ "Type": "authenticate-oidc",
+ }
+ ],
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Priority": 2,
+ },
+ {
+ "Actions": [
+ {
+ "AuthenticateOidcConfig": {
+ "AuthorizationEndpoint": "https://samples.auth0.com/authorize",
+ "ClientId": "kbyuFDidLLm280LIwVFiazOqjO3ty8KH",
+ "Issuer": "https://samples.auth0.com",
+ "Scope": "openid",
+ "SessionTimeout": 604800,
+ "TokenEndpoint": "https://samples.auth0.com/oauth/token",
+ "UseExistingClientSecret": True,
+ "UserInfoEndpoint": "https://samples.auth0.com/userinfo",
+ "OnUnauthenticatedRequest": "deny",
+ },
+ "Order": 1,
+ "Type": "authenticate-oidc",
+ }
+ ],
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Priority": 2,
+ },
+ {
+ "Actions": [
+ {
+ "AuthenticateOidcConfig": {
+ "AuthorizationEndpoint": "https://samples.auth0.com/authorize",
+ "ClientId": "kbyuFDidLLm280LIwVFiazOqjO3ty8KH",
+ "Issuer": "https://samples.auth0.com",
+ "Scope": "openid",
+ "SessionTimeout": 604800,
+ "TokenEndpoint": "https://samples.auth0.com/oauth/token",
+ "UseExistingClientSecret": True,
+ "UserInfoEndpoint": "https://samples.auth0.com/userinfo",
+ "OnUnauthenticatedRequest": "deny",
+ },
+ "Order": 1,
+ "Type": "authenticate-oidc",
+ }
+ ],
+ },
+ ),
+ (
+ {
+ "Actions": [{"TargetGroupName": "my_target_group", "Type": "forward"}],
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test", "/prod"]}],
+ "Priority": 2,
+ },
+ {
+ "Actions": [{"TargetGroupName": "my_target_group", "Type": "forward"}],
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Priority": 2,
+ },
+ {
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ },
+ ),
+]
+
+
+@pytest.mark.parametrize("current_rule,new_rule,modified_rule", test_rules)
+def test__compare_rule(mocker, current_rule, new_rule, modified_rule):
+ mocker.patch(
+ "ansible_collections.amazon.aws.plugins.module_utils.elbv2.ELBListenerRules._get_elb_listener_rules"
+ ).return_value = MagicMock()
+ mocker.patch(
+ "ansible_collections.amazon.aws.plugins.module_utils.elbv2.get_elb_listener"
+ ).return_value = MagicMock()
+ module = MagicMock()
+ connection = MagicMock()
+ elb_arn = MagicMock()
+
+ elb_listener_rules = elbv2.ELBListenerRules(connection, module, elb_arn, [], [])
+
+ assert modified_rule == elb_listener_rules._compare_rule(current_rule, new_rule)
+
+
+test_listeners_rules = [
+ (
+ [
+ {
+ "Priority": "1",
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/abc",
+ },
+ {
+ "Priority": "2",
+ "Conditions": [{"Field": "host-header", "Values": ["yolo.rocks"]}],
+ "Actions": [{"TargetGroupName": "target2", "Type": "forward"}],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/123",
+ },
+ ],
+ [
+ {
+ "Priority": 2,
+ "Conditions": [{"Field": "host-header", "Values": ["yolo.rocks"]}],
+ "Actions": [{"TargetGroupName": "target2", "Type": "forward"}],
+ },
+ {
+ "Priority": 1,
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ },
+ ],
+ {},
+ ),
+ (
+ [
+ {
+ "Priority": "1",
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/abc",
+ },
+ {
+ "Priority": "2",
+ "Conditions": [{"Field": "host-header", "Values": ["yolo.rocks"]}],
+ "Actions": [{"TargetGroupName": "target2", "Type": "forward"}],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/123",
+ },
+ ],
+ [
+ {
+ "Priority": 1,
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ },
+ {
+ "Priority": 2,
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Actions": [
+ {"TargetGroupName": "oidc-target-01", "Type": "forward", "Order": 2},
+ {
+ "Type": "authenticate-oidc",
+ "Order": 1,
+ "AuthenticateOidcConfig": {
+ "Issuer": "https://sample.oauth.com/issuer",
+ "AuthorizationEndpoint": "https://sample.oauth.com",
+ "TokenEndpoint": "https://sample.oauth.com/oauth/token",
+ "UserInfoEndpoint": "https://sample.oauth.com/userinfo",
+ "ClientId": "id123645",
+ "ClientSecret": "testSecret123!@#$",
+ "UseExistingClientSecret": True,
+ },
+ },
+ ],
+ },
+ {
+ "Priority": 3,
+ "Conditions": [{"Field": "host-header", "Values": ["yolo.rocks"]}],
+ "Actions": [{"TargetGroupName": "target2", "Type": "forward"}],
+ },
+ ],
+ {
+ "to_set_priority": [
+ {
+ "Priority": 3,
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/123",
+ }
+ ],
+ "to_add": [
+ {
+ "Priority": 2,
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Actions": [
+ {"TargetGroupName": "oidc-target-01", "Type": "forward", "Order": 2},
+ {
+ "Type": "authenticate-oidc",
+ "Order": 1,
+ "AuthenticateOidcConfig": {
+ "Issuer": "https://sample.oauth.com/issuer",
+ "AuthorizationEndpoint": "https://sample.oauth.com",
+ "TokenEndpoint": "https://sample.oauth.com/oauth/token",
+ "UserInfoEndpoint": "https://sample.oauth.com/userinfo",
+ "ClientId": "id123645",
+ "ClientSecret": "testSecret123!@#$",
+ "UseExistingClientSecret": False,
+ },
+ },
+ ],
+ },
+ ],
+ },
+ ),
+ (
+ [
+ {
+ "Priority": "2",
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/abc",
+ },
+ {
+ "Priority": "1",
+ "Conditions": [{"Field": "host-header", "Values": ["yolo.rocks"]}],
+ "Actions": [{"TargetGroupName": "target2", "Type": "forward"}],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/123",
+ },
+ ],
+ [
+ {
+ "Priority": 2,
+ "Conditions": [{"Field": "host-header", "Values": ["yolo.rocks"]}],
+ "Actions": [{"TargetGroupName": "target2", "Type": "forward"}],
+ },
+ {
+ "Priority": 1,
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ },
+ ],
+ {
+ "to_set_priority": [
+ {
+ "Priority": 2,
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/123",
+ },
+ {
+ "Priority": 1,
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/abc",
+ },
+ ]
+ },
+ ),
+ (
+ [
+ {
+ "Priority": "1",
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/abc",
+ },
+ {
+ "Priority": "2",
+ "Conditions": [{"Field": "host-header", "Values": ["yolo.rocks"]}],
+ "Actions": [{"TargetGroupName": "target2", "Type": "forward"}],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/123",
+ },
+ ],
+ [
+ {
+ "Priority": 1,
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ },
+ {
+ "Priority": 2,
+ "Conditions": [{"Field": "host-header", "Values": ["yolo.rocks"]}],
+ "Actions": [{"TargetGroupName": "target2", "Type": "forward"}],
+ },
+ {
+ "Priority": 3,
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Actions": [
+ {"TargetGroupName": "oidc-target-01", "Type": "forward", "Order": 2},
+ {
+ "Type": "authenticate-oidc",
+ "Order": 1,
+ "AuthenticateOidcConfig": {
+ "Issuer": "https://sample.oauth.com/issuer",
+ "AuthorizationEndpoint": "https://sample.oauth.com",
+ "TokenEndpoint": "https://sample.oauth.com/oauth/token",
+ "UserInfoEndpoint": "https://sample.oauth.com/userinfo",
+ "ClientId": "id123645",
+ "ClientSecret": "testSecret123!@#$",
+ "UseExistingClientSecret": True,
+ },
+ },
+ ],
+ },
+ ],
+ {
+ "to_add": [
+ {
+ "Priority": 3,
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Actions": [
+ {"TargetGroupName": "oidc-target-01", "Type": "forward", "Order": 2},
+ {
+ "Type": "authenticate-oidc",
+ "Order": 1,
+ "AuthenticateOidcConfig": {
+ "Issuer": "https://sample.oauth.com/issuer",
+ "AuthorizationEndpoint": "https://sample.oauth.com",
+ "TokenEndpoint": "https://sample.oauth.com/oauth/token",
+ "UserInfoEndpoint": "https://sample.oauth.com/userinfo",
+ "ClientId": "id123645",
+ "ClientSecret": "testSecret123!@#$",
+ "UseExistingClientSecret": False,
+ },
+ },
+ ],
+ },
+ ]
+ },
+ ),
+ (
+ [
+ {
+ "Priority": "1",
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/abc",
+ },
+ ],
+ [
+ {
+ "Priority": 1,
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ },
+ {
+ "Priority": 2,
+ "Conditions": [{"Field": "host-header", "Values": ["yolo.rocks"]}],
+ "Actions": [{"TargetGroupName": "target2", "Type": "forward"}],
+ },
+ ],
+ {
+ "to_add": [
+ {
+ "Priority": 2,
+ "Conditions": [{"Field": "host-header", "Values": ["yolo.rocks"]}],
+ "Actions": [{"TargetGroupName": "target2", "Type": "forward"}],
+ },
+ ]
+ },
+ ),
+ (
+ [
+ {
+ "Priority": "1",
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Actions": [
+ {"TargetGroupName": "oidc-target-01", "Type": "forward", "Order": 2},
+ {
+ "Type": "authenticate-oidc",
+ "Order": 1,
+ "AuthenticateOidcConfig": {
+ "Issuer": "https://sample.oauth.com/issuer",
+ "AuthorizationEndpoint": "https://sample.oauth.com",
+ "TokenEndpoint": "https://sample.oauth.com/oauth/token",
+ "UserInfoEndpoint": "https://sample.oauth.com/userinfo",
+ "ClientId": "id123645",
+ },
+ },
+ ],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/oidc",
+ },
+ ],
+ [
+ {
+ "Priority": 1,
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Actions": [
+ {"TargetGroupName": "oidc-target-01", "Type": "forward", "Order": 2},
+ {
+ "Type": "authenticate-oidc",
+ "Order": 1,
+ "AuthenticateOidcConfig": {
+ "Issuer": "https://sample.oauth.com/issuer",
+ "AuthorizationEndpoint": "https://sample.oauth.com",
+ "TokenEndpoint": "https://sample.oauth.com/oauth/token",
+ "UserInfoEndpoint": "https://sample.oauth.com/userinfo",
+ "ClientId": "id123645",
+ "ClientSecret": "testSecret123!@#$",
+ "UseExistingClientSecret": True,
+ },
+ },
+ ],
+ }
+ ],
+ {
+ "to_modify": [
+ {
+ "Priority": 1,
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Actions": [
+ {"TargetGroupName": "oidc-target-01", "Type": "forward", "Order": 2},
+ {
+ "Type": "authenticate-oidc",
+ "Order": 1,
+ "AuthenticateOidcConfig": {
+ "Issuer": "https://sample.oauth.com/issuer",
+ "AuthorizationEndpoint": "https://sample.oauth.com",
+ "TokenEndpoint": "https://sample.oauth.com/oauth/token",
+ "UserInfoEndpoint": "https://sample.oauth.com/userinfo",
+ "ClientId": "id123645",
+ "ClientSecret": "testSecret123!@#$",
+ "UseExistingClientSecret": False,
+ },
+ },
+ ],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/oidc",
+ },
+ ]
+ },
+ ),
+ (
+ [
+ {
+ "Priority": "1",
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Actions": [
+ {
+ "Type": "authenticate-oidc",
+ "Order": 1,
+ "AuthenticateOidcConfig": {
+ "Issuer": "https://sample.oauth.com/issuer",
+ "AuthorizationEndpoint": "https://sample.oauth.com",
+ "TokenEndpoint": "https://sample.oauth.com/oauth/token",
+ "UserInfoEndpoint": "https://sample.oauth.com/userinfo",
+ "ClientId": "kbyuFDidLLm280LIwVFiazOqjO3ty8KH",
+ },
+ },
+ ],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/oidc",
+ },
+ ],
+ [
+ {
+ "Priority": 1,
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Actions": [
+ {
+ "Type": "authenticate-oidc",
+ "Order": 1,
+ "AuthenticateOidcConfig": {
+ "Issuer": "https://sample.oauth.com/issuer",
+ "AuthorizationEndpoint": "https://sample.oauth.com",
+ "TokenEndpoint": "https://sample.oauth.com/oauth/token",
+ "UserInfoEndpoint": "https://sample.oauth.com/userinfo",
+ "ClientId": "kbyuFDidLLm280LIwVFiazOqjO3ty8KH",
+ "ClientSecret": "testSecret123!@#$",
+ },
+ },
+ ],
+ }
+ ],
+ {
+ "to_modify": [
+ {
+ "Priority": 1,
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Actions": [
+ {
+ "Type": "authenticate-oidc",
+ "Order": 1,
+ "AuthenticateOidcConfig": {
+ "Issuer": "https://sample.oauth.com/issuer",
+ "AuthorizationEndpoint": "https://sample.oauth.com",
+ "TokenEndpoint": "https://sample.oauth.com/oauth/token",
+ "UserInfoEndpoint": "https://sample.oauth.com/userinfo",
+ "ClientId": "kbyuFDidLLm280LIwVFiazOqjO3ty8KH",
+ "ClientSecret": "testSecret123!@#$",
+ "UseExistingClientSecret": False,
+ },
+ },
+ ],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/oidc",
+ },
+ ]
+ },
+ ),
+ (
+ [
+ {
+ "Priority": "1",
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Actions": [
+ {
+ "Type": "authenticate-oidc",
+ "Order": 1,
+ "AuthenticateOidcConfig": {
+ "AuthorizationEndpoint": "https://samples.auth0.com/authorize",
+ "ClientId": "abcdef1234567890",
+ "Issuer": "https://samples.auth0.com/",
+ "OnUnauthenticatedRequest": "authenticate",
+ "Scope": "openid",
+ "SessionCookieName": "AWSELBAuthSessionCookie",
+ "SessionTimeout": 604800,
+ "TokenEndpoint": "https://samples.auth0.com/oauth/token",
+ "UserInfoEndpoint": "https://samples.auth0.com/oauth/userinfo",
+ },
+ },
+ ],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/oidc",
+ },
+ ],
+ [
+ {
+ "Priority": 1,
+ "Conditions": [{"Field": "path-pattern", "Values": ["/test"]}],
+ "Actions": [
+ {
+ "Type": "authenticate-oidc",
+ "Order": 1,
+ "AuthenticateOidcConfig": {
+ "AuthorizationEndpoint": "https://samples.auth0.com/authorize",
+ "ClientId": "abcdef1234567890",
+ "Issuer": "https://samples.auth0.com/",
+ "OnUnauthenticatedRequest": "authenticate",
+ "Scope": "openid",
+ "TokenEndpoint": "https://samples.auth0.com/oauth/token",
+ "UserInfoEndpoint": "https://samples.auth0.com/oauth/userinfo",
+ "UseExistingClientSecret": True,
+ },
+ },
+ ],
+ }
+ ],
+ {},
+ ),
+ (
+ [
+ {
+ "Priority": "default",
+ "IsDefault": True,
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/default",
+ },
+ {
+ "Priority": "1",
+ "IsDefault": False,
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/rule-1",
+ },
+ ],
+ [
+ {
+ "Priority": 1,
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "another_target", "Type": "forward"}],
+ },
+ ],
+ {
+ "to_modify": [
+ {
+ "Priority": 1,
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "another_target", "Type": "forward"}],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/rule-1",
+ },
+ ]
+ },
+ ),
+ (
+ [
+ {
+ "Priority": "default",
+ "IsDefault": True,
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/default",
+ },
+ {
+ "Priority": "1",
+ "IsDefault": False,
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/rule-1",
+ },
+ ],
+ [
+ {
+ "Priority": 2,
+ "Conditions": [{"Field": "host-header", "Values": ["bla.tld"]}],
+ "Actions": [{"TargetGroupName": "target1", "Type": "forward"}],
+ },
+ ],
+ {
+ "to_set_priority": [
+ {
+ "Priority": 2,
+ "RuleArn": "arn:aws:elasticloadbalancing:::listener-rule/app/ansible-test/rule-1",
+ },
+ ]
+ },
+ ),
+]
+
+
+@pytest.mark.parametrize("current_rules,rules,expected", test_listeners_rules)
+def test_compare_rules(mocker, current_rules, rules, expected):
+ mocker.patch(
+ "ansible_collections.amazon.aws.plugins.module_utils.elbv2.get_elb_listener"
+ ).return_value = MagicMock()
+ mocker.patch(
+ "ansible_collections.amazon.aws.plugins.module_utils.elbv2.ELBListenerRules._ensure_rules_action_has_arn"
+ ).return_value = rules
+ mocker.patch(
+ "ansible_collections.amazon.aws.plugins.module_utils.elbv2.ELBListenerRules._get_elb_listener_rules"
+ ).return_value = current_rules
+ module = MagicMock()
+ connection = MagicMock()
+ elb_arn = MagicMock()
+
+ elb_listener_rules = elbv2.ELBListenerRules(connection, module, elb_arn, rules, 8009)
+ elb_listener_rules.current_rules = current_rules
+ rules_to_add, rules_to_modify, rules_to_delete, rules_to_set_priority = elb_listener_rules.compare_rules()
+
+ assert sorted(rules_to_add, key=lambda x: x.get("Priority", 0)) == sorted(
+ expected.get("to_add", []), key=lambda x: x.get("Priority", 0)
+ )
+ assert sorted(rules_to_modify, key=lambda x: x.get("Priority", 0)) == sorted(
+ expected.get("to_modify", []), key=lambda x: x.get("Priority", 0)
+ )
+ assert sorted(rules_to_set_priority, key=lambda x: x.get("Priority", 0)) == sorted(
+ expected.get("to_set_priority", []), key=lambda x: x.get("Priority", 0)
+ )
+ assert sorted(rules_to_delete) == sorted(expected.get("to_delete", []))
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/elbv2/test_prune.py b/ansible_collections/amazon/aws/tests/unit/module_utils/elbv2/test_prune.py
index 3a02b9e2e..96d1dbbc8 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/elbv2/test_prune.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/elbv2/test_prune.py
@@ -4,15 +4,12 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import pytest
from ansible_collections.amazon.aws.plugins.module_utils import elbv2
-example_arn = 'arn:aws:elasticloadbalancing:us-east-1:123456789012:targetgroup/nlb-123456789abc/abcdef0123456789'
-example_arn2 = 'arn:aws:elasticloadbalancing:us-east-1:123456789012:targetgroup/nlb-0123456789ab/0123456789abcdef'
+example_arn = "arn:aws:elasticloadbalancing:us-east-1:123456789012:targetgroup/nlb-123456789abc/abcdef0123456789"
+example_arn2 = "arn:aws:elasticloadbalancing:us-east-1:123456789012:targetgroup/nlb-0123456789ab/0123456789abcdef"
one_action = [
dict(
@@ -20,9 +17,10 @@ one_action = [
TargetGroupStickinessConfig=dict(Enabled=False),
TargetGroups=[
dict(TargetGroupArn=example_arn, Weight=1),
- ]
+ ],
),
- TargetGroupArn=example_arn, Type='forward',
+ TargetGroupArn=example_arn,
+ Type="forward",
)
]
@@ -33,110 +31,157 @@ one_action_two_tg = [
TargetGroups=[
dict(TargetGroupArn=example_arn, Weight=1),
dict(TargetGroupArn=example_arn2, Weight=1),
- ]
+ ],
),
- TargetGroupArn=example_arn, Type='forward',
+ TargetGroupArn=example_arn,
+ Type="forward",
)
]
-simplified_action = dict(Type='forward', TargetGroupArn=example_arn)
+simplified_action = dict(Type="forward", TargetGroupArn=example_arn)
# Examples of various minimalistic actions which are all the same
simple_actions = [
- dict(Type='forward', TargetGroupArn=example_arn),
-
- dict(Type='forward', TargetGroupArn=example_arn, ForwardConfig=dict(TargetGroups=[dict(TargetGroupArn=example_arn)])),
- dict(Type='forward', ForwardConfig=dict(TargetGroups=[dict(TargetGroupArn=example_arn)])),
- dict(Type='forward', TargetGroupArn=example_arn, ForwardConfig=dict(TargetGroups=[dict(TargetGroupArn=example_arn, Weight=1)])),
- dict(Type='forward', ForwardConfig=dict(TargetGroups=[dict(TargetGroupArn=example_arn, Weight=1)])),
- dict(Type='forward', TargetGroupArn=example_arn, ForwardConfig=dict(TargetGroups=[dict(TargetGroupArn=example_arn, Weight=42)])),
- dict(Type='forward', ForwardConfig=dict(TargetGroups=[dict(TargetGroupArn=example_arn, Weight=42)])),
-
- dict(Type='forward', TargetGroupArn=example_arn, ForwardConfig=dict(TargetGroupStickinessConfig=dict(Enabled=False),
- TargetGroups=[dict(TargetGroupArn=example_arn)])),
- dict(Type='forward', ForwardConfig=dict(TargetGroupStickinessConfig=dict(Enabled=False), TargetGroups=[dict(TargetGroupArn=example_arn)])),
- dict(Type='forward', TargetGroupArn=example_arn, ForwardConfig=dict(TargetGroupStickinessConfig=dict(Enabled=False),
- TargetGroups=[dict(TargetGroupArn=example_arn, Weight=1)])),
- dict(Type='forward', ForwardConfig=dict(TargetGroupStickinessConfig=dict(Enabled=False), TargetGroups=[dict(TargetGroupArn=example_arn, Weight=1)])),
- dict(Type='forward', TargetGroupArn=example_arn, ForwardConfig=dict(TargetGroupStickinessConfig=dict(Enabled=False),
- TargetGroups=[dict(TargetGroupArn=example_arn, Weight=42)])),
- dict(Type='forward', ForwardConfig=dict(TargetGroupStickinessConfig=dict(Enabled=False), TargetGroups=[dict(TargetGroupArn=example_arn, Weight=42)])),
+ dict(Type="forward", TargetGroupArn=example_arn),
+ dict(
+ Type="forward", TargetGroupArn=example_arn, ForwardConfig=dict(TargetGroups=[dict(TargetGroupArn=example_arn)])
+ ),
+ dict(Type="forward", ForwardConfig=dict(TargetGroups=[dict(TargetGroupArn=example_arn)])),
+ dict(
+ Type="forward",
+ TargetGroupArn=example_arn,
+ ForwardConfig=dict(TargetGroups=[dict(TargetGroupArn=example_arn, Weight=1)]),
+ ),
+ dict(Type="forward", ForwardConfig=dict(TargetGroups=[dict(TargetGroupArn=example_arn, Weight=1)])),
+ dict(
+ Type="forward",
+ TargetGroupArn=example_arn,
+ ForwardConfig=dict(TargetGroups=[dict(TargetGroupArn=example_arn, Weight=42)]),
+ ),
+ dict(Type="forward", ForwardConfig=dict(TargetGroups=[dict(TargetGroupArn=example_arn, Weight=42)])),
+ dict(
+ Type="forward",
+ TargetGroupArn=example_arn,
+ ForwardConfig=dict(
+ TargetGroupStickinessConfig=dict(Enabled=False), TargetGroups=[dict(TargetGroupArn=example_arn)]
+ ),
+ ),
+ dict(
+ Type="forward",
+ ForwardConfig=dict(
+ TargetGroupStickinessConfig=dict(Enabled=False), TargetGroups=[dict(TargetGroupArn=example_arn)]
+ ),
+ ),
+ dict(
+ Type="forward",
+ TargetGroupArn=example_arn,
+ ForwardConfig=dict(
+ TargetGroupStickinessConfig=dict(Enabled=False), TargetGroups=[dict(TargetGroupArn=example_arn, Weight=1)]
+ ),
+ ),
+ dict(
+ Type="forward",
+ ForwardConfig=dict(
+ TargetGroupStickinessConfig=dict(Enabled=False), TargetGroups=[dict(TargetGroupArn=example_arn, Weight=1)]
+ ),
+ ),
+ dict(
+ Type="forward",
+ TargetGroupArn=example_arn,
+ ForwardConfig=dict(
+ TargetGroupStickinessConfig=dict(Enabled=False), TargetGroups=[dict(TargetGroupArn=example_arn, Weight=42)]
+ ),
+ ),
+ dict(
+ Type="forward",
+ ForwardConfig=dict(
+ TargetGroupStickinessConfig=dict(Enabled=False), TargetGroups=[dict(TargetGroupArn=example_arn, Weight=42)]
+ ),
+ ),
]
# Test that _prune_ForwardConfig() doesn't mangle things we don't expect
complex_actions = [
# Non-Forwarding
dict(
- Type='authenticate-oidc', TargetGroupArn=example_arn,
+ Type="authenticate-oidc",
+ TargetGroupArn=example_arn,
AuthenticateOidcConfig=dict(
- Issuer='https://idp.ansible.test/oidc-config',
- AuthorizationEndpoint='https://idp.ansible.test/authz',
- TokenEndpoint='https://idp.ansible.test/token',
- UserInfoEndpoint='https://idp.ansible.test/user',
- ClientId='ExampleClient',
+ Issuer="https://idp.ansible.test/oidc-config",
+ AuthorizationEndpoint="https://idp.ansible.test/authz",
+ TokenEndpoint="https://idp.ansible.test/token",
+ UserInfoEndpoint="https://idp.ansible.test/user",
+ ClientId="ExampleClient",
UseExistingClientSecret=False,
),
),
dict(
- Type='redirect',
- RedirectConfig=dict(Protocol='HTTPS', Port=443, Host='redirect.ansible.test', Path='/', StatusCode='HTTP_302'),
+ Type="redirect",
+ RedirectConfig=dict(Protocol="HTTPS", Port=443, Host="redirect.ansible.test", Path="/", StatusCode="HTTP_302"),
),
# Multiple TGs
dict(
- TargetGroupArn=example_arn, Type='forward',
+ TargetGroupArn=example_arn,
+ Type="forward",
ForwardConfig=dict(
TargetGroupStickinessConfig=dict(Enabled=False),
TargetGroups=[
dict(TargetGroupArn=example_arn, Weight=1),
dict(TargetGroupArn=example_arn2, Weight=1),
- ]
+ ],
),
),
# Sticky-Sessions
dict(
- Type='forward', TargetGroupArn=example_arn,
+ Type="forward",
+ TargetGroupArn=example_arn,
ForwardConfig=dict(
TargetGroupStickinessConfig=dict(Enabled=True, DurationSeconds=3600),
- TargetGroups=[dict(TargetGroupArn=example_arn)]
- )
+ TargetGroups=[dict(TargetGroupArn=example_arn)],
+ ),
),
]
simplified_oidc_action = dict(
- Type='authenticate-oidc', TargetGroupArn=example_arn,
+ Type="authenticate-oidc",
+ TargetGroupArn=example_arn,
AuthenticateOidcConfig=dict(
- Issuer='https://idp.ansible.test/oidc-config',
- AuthorizationEndpoint='https://idp.ansible.test/authz',
- TokenEndpoint='https://idp.ansible.test/token',
- UserInfoEndpoint='https://idp.ansible.test/user',
- ClientId='ExampleClient',
- Scope='openid',
+ Issuer="https://idp.ansible.test/oidc-config",
+ AuthorizationEndpoint="https://idp.ansible.test/authz",
+ TokenEndpoint="https://idp.ansible.test/token",
+ UserInfoEndpoint="https://idp.ansible.test/user",
+ ClientId="ExampleClient",
+ Scope="openid",
SessionTimeout=604800,
UseExistingClientSecret=True,
+ OnUnauthenticatedRequest="authenticate",
+ SessionCookieName="AWSELBAuthSessionCookie",
),
)
oidc_actions = [
dict(
- Type='authenticate-oidc', TargetGroupArn=example_arn,
+ Type="authenticate-oidc",
+ TargetGroupArn=example_arn,
AuthenticateOidcConfig=dict(
- Issuer='https://idp.ansible.test/oidc-config',
- AuthorizationEndpoint='https://idp.ansible.test/authz',
- TokenEndpoint='https://idp.ansible.test/token',
- UserInfoEndpoint='https://idp.ansible.test/user',
- ClientId='ExampleClient',
+ Issuer="https://idp.ansible.test/oidc-config",
+ AuthorizationEndpoint="https://idp.ansible.test/authz",
+ TokenEndpoint="https://idp.ansible.test/token",
+ UserInfoEndpoint="https://idp.ansible.test/user",
+ ClientId="ExampleClient",
UseExistingClientSecret=True,
- Scope='openid',
- SessionTimeout=604800
+ Scope="openid",
+ SessionTimeout=604800,
),
),
dict(
- Type='authenticate-oidc', TargetGroupArn=example_arn,
+ Type="authenticate-oidc",
+ TargetGroupArn=example_arn,
AuthenticateOidcConfig=dict(
- Issuer='https://idp.ansible.test/oidc-config',
- AuthorizationEndpoint='https://idp.ansible.test/authz',
- TokenEndpoint='https://idp.ansible.test/token',
- UserInfoEndpoint='https://idp.ansible.test/user',
- ClientId='ExampleClient',
- ClientSecret='MyVerySecretString',
+ Issuer="https://idp.ansible.test/oidc-config",
+ AuthorizationEndpoint="https://idp.ansible.test/authz",
+ TokenEndpoint="https://idp.ansible.test/token",
+ UserInfoEndpoint="https://idp.ansible.test/user",
+ ClientId="ExampleClient",
+ ClientSecret="MyVerySecretString",
UseExistingClientSecret=True,
),
),
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/errors/aws_error_handler/test_common_handler.py b/ansible_collections/amazon/aws/tests/unit/module_utils/errors/aws_error_handler/test_common_handler.py
new file mode 100644
index 000000000..3a3cc41b9
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/errors/aws_error_handler/test_common_handler.py
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+
+# Copyright: Contributors to the Ansible project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+try:
+ import botocore
+except ImportError:
+ pass
+
+import pytest
+
+from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
+from ansible_collections.amazon.aws.plugins.module_utils.errors import AWSErrorHandler
+from ansible_collections.amazon.aws.plugins.module_utils.exceptions import AnsibleAWSError
+
+if not HAS_BOTO3:
+ pytestmark = pytest.mark.skip("test_common_handler.py requires the python modules 'boto3' and 'botocore'")
+
+
+class AnsibleAWSExampleError(AnsibleAWSError):
+ pass
+
+
+class AWSExampleErrorHandler(AWSErrorHandler):
+ _CUSTOM_EXCEPTION = AnsibleAWSExampleError
+
+ @classmethod
+ def _is_missing(cls):
+ # Shouldn't be called by the 'common' handler
+ assert False, "_is_missing() should not be called by common_error_handler"
+
+
+class TestAwsCommonHandler:
+ def test_no_failures(self):
+ self.counter = 0
+
+ @AWSErrorHandler.common_error_handler("no error")
+ def no_failures():
+ self.counter += 1
+
+ no_failures()
+ assert self.counter == 1
+
+ def test_no_failures_no_missing(self):
+ self.counter = 0
+
+ @AWSExampleErrorHandler.common_error_handler("no error")
+ def no_failures():
+ self.counter += 1
+
+ no_failures()
+ assert self.counter == 1
+
+ def test_client_error(self):
+ self.counter = 0
+ err_response = {"Error": {"Code": "MalformedPolicyDocument"}}
+
+ @AWSErrorHandler.common_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "Something bad")
+
+ with pytest.raises(AnsibleAWSError) as e_info:
+ raise_client_error()
+ assert self.counter == 1
+ raised = e_info.value
+ assert isinstance(raised.exception, botocore.exceptions.ClientError)
+ assert "do something" in raised.message
+ assert "Something bad" in str(raised.exception)
+
+ def test_custom_error(self):
+ self.counter = 0
+ err_response = {"Error": {"Code": "MalformedPolicyDocument"}}
+
+ @AWSExampleErrorHandler.common_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "Something bad")
+
+ with pytest.raises(AnsibleAWSExampleError) as e_info:
+ raise_client_error()
+ assert self.counter == 1
+ raised = e_info.value
+ assert isinstance(raised.exception, botocore.exceptions.ClientError)
+ assert "do something" in raised.message
+ assert "Something bad" in str(raised.exception)
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/errors/aws_error_handler/test_deletion_handler.py b/ansible_collections/amazon/aws/tests/unit/module_utils/errors/aws_error_handler/test_deletion_handler.py
new file mode 100644
index 000000000..adc08f6c1
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/errors/aws_error_handler/test_deletion_handler.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+
+# Copyright: Contributors to the Ansible project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+try:
+ import botocore
+except ImportError:
+ pass
+
+import pytest
+
+from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
+from ansible_collections.amazon.aws.plugins.module_utils.botocore import is_boto3_error_code
+from ansible_collections.amazon.aws.plugins.module_utils.errors import AWSErrorHandler
+from ansible_collections.amazon.aws.plugins.module_utils.exceptions import AnsibleAWSError
+
+if not HAS_BOTO3:
+ pytestmark = pytest.mark.skip("test_deletion_handler.py requires the python modules 'boto3' and 'botocore'")
+
+
+class AnsibleAWSExampleError(AnsibleAWSError):
+ pass
+
+
+class AWSExampleErrorHandler(AWSErrorHandler):
+ _CUSTOM_EXCEPTION = AnsibleAWSExampleError
+
+ @classmethod
+ def _is_missing(cls):
+ return is_boto3_error_code("NoSuchEntity")
+
+
+class AWSCleanErrorHandler(AWSErrorHandler):
+ @classmethod
+ def _is_missing(cls):
+ # Shouldn't be called if there's no error
+ assert False, "_is_missing() should not be called when no errors occurred"
+
+
+class TestAWSDeletionHandler:
+ def test_no_failures(self):
+ self.counter = 0
+
+ @AWSErrorHandler.deletion_error_handler("no error")
+ def no_failures():
+ self.counter += 1
+
+ no_failures()
+ assert self.counter == 1
+
+ def test_no_failures_no_missing(self):
+ self.counter = 0
+
+ @AWSCleanErrorHandler.deletion_error_handler("no error")
+ def no_failures():
+ self.counter += 1
+
+ no_failures()
+ assert self.counter == 1
+
+ def test_client_error(self):
+ self.counter = 0
+ err_response = {"Error": {"Code": "MalformedPolicyDocument"}}
+
+ @AWSErrorHandler.deletion_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "Something bad")
+
+ with pytest.raises(AnsibleAWSError) as e_info:
+ raise_client_error()
+ assert self.counter == 1
+ raised = e_info.value
+ assert isinstance(raised.exception, botocore.exceptions.ClientError)
+ assert "do something" in raised.message
+ assert "Something bad" in str(raised.exception)
+
+ def test_no_missing_client_error(self):
+ # If _is_missing() hasn't been overridden we do nothing interesting
+ self.counter = 0
+ err_response = {"Error": {"Code": "NoSuchEntity"}}
+
+ @AWSErrorHandler.deletion_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "I couldn't find it")
+
+ with pytest.raises(AnsibleAWSError) as e_info:
+ raise_client_error()
+ assert self.counter == 1
+ raised = e_info.value
+ assert isinstance(raised.exception, botocore.exceptions.ClientError)
+ assert "do something" in raised.message
+ assert "I couldn't find it" in str(raised.exception)
+
+ def test_ignore_error(self):
+ self.counter = 0
+ err_response = {"Error": {"Code": "NoSuchEntity"}}
+
+ @AWSExampleErrorHandler.deletion_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "I couldn't find it")
+
+ ret_val = raise_client_error()
+ assert self.counter == 1
+ assert ret_val is False
+
+ def test_custom_error(self):
+ self.counter = 0
+ err_response = {"Error": {"Code": "MalformedPolicyDocument"}}
+
+ @AWSExampleErrorHandler.deletion_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "Something bad")
+
+ with pytest.raises(AnsibleAWSExampleError) as e_info:
+ raise_client_error()
+ assert self.counter == 1
+ raised = e_info.value
+ assert isinstance(raised.exception, botocore.exceptions.ClientError)
+ assert "do something" in raised.message
+ assert "Something bad" in str(raised.exception)
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/errors/aws_error_handler/test_list_handler.py b/ansible_collections/amazon/aws/tests/unit/module_utils/errors/aws_error_handler/test_list_handler.py
new file mode 100644
index 000000000..4f9d276f6
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/errors/aws_error_handler/test_list_handler.py
@@ -0,0 +1,128 @@
+# -*- coding: utf-8 -*-
+
+# Copyright: Contributors to the Ansible project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+try:
+ import botocore
+except ImportError:
+ pass
+
+import pytest
+
+from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
+from ansible_collections.amazon.aws.plugins.module_utils.botocore import is_boto3_error_code
+from ansible_collections.amazon.aws.plugins.module_utils.errors import AWSErrorHandler
+from ansible_collections.amazon.aws.plugins.module_utils.exceptions import AnsibleAWSError
+
+if not HAS_BOTO3:
+ pytestmark = pytest.mark.skip("test_list_handler.py requires the python modules 'boto3' and 'botocore'")
+
+
+class AnsibleAWSExampleError(AnsibleAWSError):
+ pass
+
+
+class AWSExampleErrorHandler(AWSErrorHandler):
+ _CUSTOM_EXCEPTION = AnsibleAWSExampleError
+
+ @classmethod
+ def _is_missing(cls):
+ return is_boto3_error_code("NoSuchEntity")
+
+
+class AWSCleanErrorHandler(AWSErrorHandler):
+ @classmethod
+ def _is_missing(cls):
+ # Shouldn't be called if there's no error
+ assert False, "_is_missing() should not be called when no errors occurred"
+
+
+class TestAWSListHandler:
+ def test_no_failures(self):
+ self.counter = 0
+
+ @AWSErrorHandler.list_error_handler("no error")
+ def no_failures():
+ self.counter += 1
+
+ no_failures()
+ assert self.counter == 1
+
+ def test_client_error(self):
+ self.counter = 0
+ err_response = {"Error": {"Code": "MalformedPolicyDocument"}}
+
+ @AWSErrorHandler.list_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "Something bad")
+
+ with pytest.raises(AnsibleAWSError) as e_info:
+ raise_client_error()
+ assert self.counter == 1
+ raised = e_info.value
+ assert isinstance(raised.exception, botocore.exceptions.ClientError)
+ assert "do something" in raised.message
+ assert "Something bad" in str(raised.exception)
+
+ def test_no_missing_client_error(self):
+ # If _is_missing() hasn't been overridden we do nothing interesting
+ self.counter = 0
+ err_response = {"Error": {"Code": "NoSuchEntity"}}
+
+ @AWSErrorHandler.list_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "Something bad")
+
+ with pytest.raises(AnsibleAWSError) as e_info:
+ raise_client_error()
+ assert self.counter == 1
+ raised = e_info.value
+ assert isinstance(raised.exception, botocore.exceptions.ClientError)
+ assert "do something" in raised.message
+ assert "Something bad" in str(raised.exception)
+
+ def test_list_error(self):
+ self.counter = 0
+ err_response = {"Error": {"Code": "NoSuchEntity"}}
+
+ @AWSExampleErrorHandler.list_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "I couldn't find it")
+
+ ret_val = raise_client_error()
+ assert self.counter == 1
+ assert ret_val is None
+
+ def test_list_error_custom_return(self):
+ self.counter = 0
+ err_response = {"Error": {"Code": "NoSuchEntity"}}
+
+ @AWSExampleErrorHandler.list_error_handler("do something", [])
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "I couldn't find it")
+
+ ret_val = raise_client_error()
+ assert self.counter == 1
+ assert ret_val == []
+
+ def test_custom_error(self):
+ self.counter = 0
+ err_response = {"Error": {"Code": "MalformedPolicyDocument"}}
+
+ @AWSExampleErrorHandler.list_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "Something bad")
+
+ with pytest.raises(AnsibleAWSExampleError) as e_info:
+ raise_client_error()
+ assert self.counter == 1
+ raised = e_info.value
+ assert isinstance(raised.exception, botocore.exceptions.ClientError)
+ assert "do something" in raised.message
+ assert "Something bad" in str(raised.exception)
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/exceptions/__init__.py b/ansible_collections/amazon/aws/tests/unit/module_utils/exceptions/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/exceptions/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/exceptions/test_exceptions.py b/ansible_collections/amazon/aws/tests/unit/module_utils/exceptions/test_exceptions.py
new file mode 100644
index 000000000..a2979f848
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/exceptions/test_exceptions.py
@@ -0,0 +1,101 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import sentinel
+
+import pytest
+
+import ansible_collections.amazon.aws.plugins.module_utils.exceptions as aws_exceptions
+
+
+@pytest.fixture
+def utils_exceptions():
+ return aws_exceptions
+
+
+def test_with_kwargs(utils_exceptions):
+ nested_exception = Exception(sentinel.EXCEPTION)
+ with pytest.raises(utils_exceptions.AnsibleAWSError) as e:
+ raise utils_exceptions.AnsibleAWSError(kw1=sentinel.KW1, kw2=sentinel.KW2)
+ assert str(e.value) == ""
+ assert e.value.exception is None
+ assert e.value.message is None
+ assert e.value.kwargs == dict(kw1=sentinel.KW1, kw2=sentinel.KW2)
+
+ with pytest.raises(utils_exceptions.AnsibleAWSError) as e:
+ raise utils_exceptions.AnsibleAWSError(
+ message=sentinel.MESSAGE, exception=nested_exception, kw1=sentinel.KW1, kw2=sentinel.KW2
+ )
+ assert str(e.value) == "sentinel.MESSAGE: sentinel.EXCEPTION"
+ assert e.value.exception is nested_exception
+ assert e.value.message is sentinel.MESSAGE
+ assert e.value.kwargs == dict(kw1=sentinel.KW1, kw2=sentinel.KW2)
+
+
+def test_with_both(utils_exceptions):
+ nested_exception = Exception(sentinel.EXCEPTION)
+
+ with pytest.raises(utils_exceptions.AnsibleAWSError) as e:
+ raise utils_exceptions.AnsibleAWSError(message=sentinel.MESSAGE, exception=nested_exception)
+ assert str(e.value) == "sentinel.MESSAGE: sentinel.EXCEPTION"
+ assert e.value.exception is nested_exception
+ assert e.value.message is sentinel.MESSAGE
+ assert e.value.kwargs == {}
+
+ with pytest.raises(utils_exceptions.AnsibleAWSError) as e:
+ raise utils_exceptions.AnsibleAWSError(sentinel.MESSAGE, exception=nested_exception)
+ assert str(e.value) == "sentinel.MESSAGE: sentinel.EXCEPTION"
+ assert e.value.exception is nested_exception
+ assert e.value.message is sentinel.MESSAGE
+ assert e.value.kwargs == {}
+
+
+def test_with_exception(utils_exceptions):
+ nested_exception = Exception(sentinel.EXCEPTION)
+
+ with pytest.raises(utils_exceptions.AnsibleAWSError) as e:
+ raise utils_exceptions.AnsibleAWSError(exception=nested_exception)
+ assert str(e.value) == "sentinel.EXCEPTION"
+ assert e.value.exception is nested_exception
+ assert e.value.message is None
+ assert e.value.kwargs == {}
+
+
+def test_with_message(utils_exceptions):
+ with pytest.raises(utils_exceptions.AnsibleAWSError) as e:
+ raise utils_exceptions.AnsibleAWSError(message=sentinel.MESSAGE)
+ assert str(e.value) == "sentinel.MESSAGE"
+ assert e.value.exception is None
+ assert e.value.message is sentinel.MESSAGE
+ assert e.value.kwargs == {}
+
+ with pytest.raises(utils_exceptions.AnsibleAWSError) as e:
+ raise utils_exceptions.AnsibleAWSError(sentinel.MESSAGE)
+ assert str(e.value) == "sentinel.MESSAGE"
+ assert e.value.exception is None
+ assert e.value.message is sentinel.MESSAGE
+ assert e.value.kwargs == {}
+
+
+def test_empty(utils_exceptions):
+ with pytest.raises(utils_exceptions.AnsibleAWSError) as e:
+ raise utils_exceptions.AnsibleAWSError()
+ assert str(e.value) == ""
+ assert e.value.exception is None
+ assert e.value.message is None
+ assert e.value.kwargs == {}
+
+
+def test_inheritence(utils_exceptions):
+ aws_exception = utils_exceptions.AnsibleAWSError()
+
+ assert isinstance(aws_exception, Exception)
+ assert isinstance(aws_exception, utils_exceptions.AnsibleAWSError)
+
+ botocore_exception = utils_exceptions.AnsibleBotocoreError()
+
+ assert isinstance(botocore_exception, Exception)
+ assert isinstance(botocore_exception, utils_exceptions.AnsibleAWSError)
+ assert isinstance(botocore_exception, utils_exceptions.AnsibleBotocoreError)
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/iam/test_iam_error_handler.py b/ansible_collections/amazon/aws/tests/unit/module_utils/iam/test_iam_error_handler.py
new file mode 100644
index 000000000..7da8f6e0d
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/iam/test_iam_error_handler.py
@@ -0,0 +1,131 @@
+# -*- coding: utf-8 -*-
+
+# Copyright: Contributors to the Ansible project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+try:
+ import botocore
+except ImportError:
+ pass
+
+import pytest
+
+from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
+from ansible_collections.amazon.aws.plugins.module_utils.iam import AnsibleIAMError
+from ansible_collections.amazon.aws.plugins.module_utils.iam import IAMErrorHandler
+
+if not HAS_BOTO3:
+ pytestmark = pytest.mark.skip("test_iam_error_handler.py requires the python modules 'boto3' and 'botocore'")
+
+
+class TestIamDeletionHandler:
+ def test_no_failures(self):
+ self.counter = 0
+
+ @IAMErrorHandler.deletion_error_handler("no error")
+ def no_failures():
+ self.counter += 1
+
+ no_failures()
+ assert self.counter == 1
+
+ def test_client_error(self):
+ self.counter = 0
+ err_response = {"Error": {"Code": "MalformedPolicyDocument"}}
+
+ @IAMErrorHandler.deletion_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "Something bad")
+
+ with pytest.raises(AnsibleIAMError) as e_info:
+ raise_client_error()
+ assert self.counter == 1
+ raised = e_info.value
+ assert isinstance(raised.exception, botocore.exceptions.ClientError)
+ assert "do something" in raised.message
+ assert "Something bad" in str(raised.exception)
+
+ def test_ignore_error(self):
+ self.counter = 0
+ err_response = {"Error": {"Code": "NoSuchEntity"}}
+
+ @IAMErrorHandler.deletion_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "I couldn't find it")
+
+ ret_val = raise_client_error()
+ assert self.counter == 1
+ assert ret_val is False
+
+
+class TestIamListHandler:
+ def test_no_failures(self):
+ self.counter = 0
+
+ @IAMErrorHandler.list_error_handler("no error")
+ def no_failures():
+ self.counter += 1
+
+ no_failures()
+ assert self.counter == 1
+
+ def test_client_error(self):
+ self.counter = 0
+ err_response = {"Error": {"Code": "MalformedPolicyDocument"}}
+
+ @IAMErrorHandler.list_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "Something bad")
+
+ with pytest.raises(AnsibleIAMError) as e_info:
+ raise_client_error()
+ assert self.counter == 1
+ raised = e_info.value
+ assert isinstance(raised.exception, botocore.exceptions.ClientError)
+ assert "do something" in raised.message
+ assert "Something bad" in str(raised.exception)
+
+ def test_list_error(self):
+ self.counter = 0
+ err_response = {"Error": {"Code": "NoSuchEntity"}}
+
+ @IAMErrorHandler.list_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "I couldn't find it")
+
+ ret_val = raise_client_error()
+ assert self.counter == 1
+ assert ret_val is None
+
+
+class TestIamCommonHandler:
+ def test_no_failures(self):
+ self.counter = 0
+
+ @IAMErrorHandler.common_error_handler("no error")
+ def no_failures():
+ self.counter += 1
+
+ no_failures()
+ assert self.counter == 1
+
+ def test_client_error(self):
+ self.counter = 0
+ err_response = {"Error": {"Code": "MalformedPolicyDocument"}}
+
+ @IAMErrorHandler.common_error_handler("do something")
+ def raise_client_error():
+ self.counter += 1
+ raise botocore.exceptions.ClientError(err_response, "Something bad")
+
+ with pytest.raises(AnsibleIAMError) as e_info:
+ raise_client_error()
+ assert self.counter == 1
+ raised = e_info.value
+ assert isinstance(raised.exception, botocore.exceptions.ClientError)
+ assert "do something" in raised.message
+ assert "Something bad" in str(raised.exception)
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/iam/test_validate_iam_identifiers.py b/ansible_collections/amazon/aws/tests/unit/module_utils/iam/test_validate_iam_identifiers.py
new file mode 100644
index 000000000..d5a0436f9
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/iam/test_validate_iam_identifiers.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+
+# Copyright: Contributors to the Ansible project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+import pytest
+
+from ansible_collections.amazon.aws.plugins.module_utils.iam import validate_iam_identifiers
+
+# See also: https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_iam-quotas.html
+validate_test_data = [
+ (
+ dict(), # Input
+ None, # Output role
+ None, # Output user
+ None, # Output generic
+ ),
+ (dict(path="/"), None, None, None),
+ (dict(name="Example"), None, None, None),
+ # Path tests
+ (
+ dict(path="/12345abcd"),
+ "path must begin and end with /",
+ "path must begin and end with /",
+ "path must begin and end with /",
+ ),
+ (dict(path="/12345abcd/"), None, None, None),
+ (dict(path=f"/{'12345abcd0' * 51}/"), None, None, None), # Max length 512 chars
+ (
+ dict(path=f"/{'12345abcd/' * 51}a/"),
+ "path may not exceed 512",
+ "path may not exceed 512",
+ "path may not exceed 512",
+ ),
+ (dict(path="/12345+=,.@_-abcd/"), None, None, None), # limited allowed special characters
+ (dict(path="/12345&abcd/"), "path must match pattern", "path must match pattern", "path must match pattern"),
+ (dict(path="/12345:abcd/"), "path must match pattern", "path must match pattern", "path must match pattern"),
+ # Name tests
+ (dict(name="12345abcd"), None, None, None),
+ (dict(name=f"{'12345abcd0' * 6}1234"), None, None, None), # Max length
+ (dict(name=f"{'12345abcd0' * 6}12345"), "name may not exceed 64", "name may not exceed 64", None),
+ (dict(name=f"{'12345abcd0' * 12}12345678"), "name may not exceed 64", "name may not exceed 64", None),
+ (
+ dict(name=f"{'12345abcd0' * 12}123456789"),
+ "name may not exceed 64",
+ "name may not exceed 64",
+ "name may not exceed 128",
+ ),
+ (dict(name="12345+=,.@_-abcd"), None, None, None), # limited allowed special characters
+ (dict(name="12345&abcd"), "name must match pattern", "name must match pattern", "name must match pattern"),
+ (dict(name="12345:abcd"), "name must match pattern", "name must match pattern", "name must match pattern"),
+ (dict(name="/12345/abcd/"), "name must match pattern", "name must match pattern", "name must match pattern"),
+ # Dual tests
+ (dict(path="/example/", name="Example"), None, None, None),
+ (dict(path="/exa:ple/", name="Example"), "path", "path", "path"),
+ (dict(path="/example/", name="Exa:ple"), "name", "name", "name"),
+]
+
+
+@pytest.mark.parametrize("input_params, output_role, output_user, output_generic", validate_test_data)
+def test_scrub_none_parameters(input_params, output_role, output_user, output_generic):
+ # Role and User have additional length constraints
+ return_role = validate_iam_identifiers("role", **input_params)
+ return_user = validate_iam_identifiers("user", **input_params)
+ return_generic = validate_iam_identifiers("generic", **input_params)
+
+ if output_role is None:
+ assert return_role is None
+ else:
+ assert return_role is not None
+ assert output_role in return_role
+ if output_user is None:
+ assert return_user is None
+ else:
+ assert return_user is not None
+ assert output_user in return_user
+
+ # Defaults
+ if output_generic is None:
+ assert return_generic is None
+ else:
+ assert return_generic is not None
+ assert output_generic in return_generic
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/modules/__init__.py b/ansible_collections/amazon/aws/tests/unit/module_utils/modules/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/modules/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/__init__.py b/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_fail_json_aws.py b/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_fail_json_aws.py
index 51e64490f..8a6fc96ec 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_fail_json_aws.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_fail_json_aws.py
@@ -3,15 +3,13 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import json
+
import pytest
try:
- import botocore
import boto3
+ import botocore
except ImportError:
pass
@@ -24,17 +22,14 @@ if not HAS_BOTO3:
pytestmark = pytest.mark.skip("test_fail_json_aws.py requires the python modules 'boto3' and 'botocore'")
-class TestFailJsonAwsTestSuite(object):
+class TestFailJsonAwsTestSuite:
# ========================================================
# Prepare some data for use in our testing
# ========================================================
def setup_method(self):
# Basic information that ClientError needs to spawn off an error
self.EXAMPLE_EXCEPTION_DATA = {
- "Error": {
- "Code": "InvalidParameterValue",
- "Message": "The filter 'exampleFilter' is invalid"
- },
+ "Error": {"Code": "InvalidParameterValue", "Message": "The filter 'exampleFilter' is invalid"},
"ResponseMetadata": {
"RequestId": "01234567-89ab-cdef-0123-456789abcdef",
"HTTPStatusCode": 400,
@@ -42,15 +37,18 @@ class TestFailJsonAwsTestSuite(object):
"transfer-encoding": "chunked",
"date": "Fri, 13 Nov 2020 00:00:00 GMT",
"connection": "close",
- "server": "AmazonEC2"
+ "server": "AmazonEC2",
},
- "RetryAttempts": 0
- }
+ "RetryAttempts": 0,
+ },
}
self.CAMEL_RESPONSE = camel_dict_to_snake_dict(self.EXAMPLE_EXCEPTION_DATA.get("ResponseMetadata"))
self.CAMEL_ERROR = camel_dict_to_snake_dict(self.EXAMPLE_EXCEPTION_DATA.get("Error"))
# ClientError(EXAMPLE_EXCEPTION_DATA, "testCall") will generate this
- self.EXAMPLE_MSG = "An error occurred (InvalidParameterValue) when calling the testCall operation: The filter 'exampleFilter' is invalid"
+ self.EXAMPLE_MSG = (
+ "An error occurred (InvalidParameterValue) when calling the testCall operation: The filter 'exampleFilter'"
+ " is invalid"
+ )
self.DEFAULT_CORE_MSG = "An unspecified error occurred"
self.FAIL_MSG = "I Failed!"
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_minimal_versions.py b/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_minimal_versions.py
index 17e69ecb5..32210054b 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_minimal_versions.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_minimal_versions.py
@@ -3,17 +3,14 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
+import json
from pprint import pprint
+
import pytest
-import json
-import warnings
try:
- import botocore
import boto3
+ import botocore
except ImportError:
pass
@@ -24,15 +21,15 @@ if not HAS_BOTO3:
pytestmark = pytest.mark.skip("test_minimal_versions.py requires the python modules 'boto3' and 'botocore'")
-class TestMinimalVersionTestSuite(object):
+class TestMinimalVersionTestSuite:
# ========================================================
# Prepare some data for use in our testing
# ========================================================
def setup_method(self):
- self.MINIMAL_BOTO3 = '1.18.0'
- self.MINIMAL_BOTOCORE = '1.21.0'
- self.OLD_BOTO3 = '1.17.999'
- self.OLD_BOTOCORE = '1.20.999'
+ self.MINIMAL_BOTO3 = "1.26.0"
+ self.MINIMAL_BOTOCORE = "1.29.0"
+ self.OLD_BOTO3 = "1.25.999"
+ self.OLD_BOTOCORE = "1.28.999"
# ========================================================
# Test we don't warn when using valid versions
@@ -110,7 +107,7 @@ class TestMinimalVersionTestSuite(object):
assert len(warnings) == 1
# Assert that we have a warning about the version but be
# relaxed about the exact message
- assert 'boto3' in warnings[0]
+ assert "boto3" in warnings[0]
assert self.MINIMAL_BOTO3 in warnings[0]
# ========================================================
@@ -143,7 +140,7 @@ class TestMinimalVersionTestSuite(object):
assert len(warnings) == 1
# Assert that we have a warning about the version but be
# relaxed about the exact message
- assert 'botocore' in warnings[0]
+ assert "botocore" in warnings[0]
assert self.MINIMAL_BOTOCORE in warnings[0]
# ========================================================
@@ -178,14 +175,14 @@ class TestMinimalVersionTestSuite(object):
warning_dict = dict()
for warning in warnings:
- if 'boto3' in warning:
- warning_dict['boto3'] = warning
- if 'botocore' in warning:
- warning_dict['botocore'] = warning
+ if "boto3" in warning:
+ warning_dict["boto3"] = warning
+ if "botocore" in warning:
+ warning_dict["botocore"] = warning
# Assert that we have a warning about the version but be
# relaxed about the exact message
- assert warning_dict.get('boto3') is not None
- assert self.MINIMAL_BOTO3 in warning_dict.get('boto3')
- assert warning_dict.get('botocore') is not None
- assert self.MINIMAL_BOTOCORE in warning_dict.get('botocore')
+ assert warning_dict.get("boto3") is not None
+ assert self.MINIMAL_BOTO3 in warning_dict.get("boto3")
+ assert warning_dict.get("botocore") is not None
+ assert self.MINIMAL_BOTOCORE in warning_dict.get("botocore")
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_passthrough.py b/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_passthrough.py
new file mode 100644
index 000000000..c61de1391
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_passthrough.py
@@ -0,0 +1,209 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+import warnings
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import sentinel
+
+import pytest
+
+import ansible_collections.amazon.aws.plugins.module_utils.modules as utils_module
+
+
+@pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
+def test_params(monkeypatch, stdin):
+ aws_module = utils_module.AnsibleAWSModule(argument_spec=dict())
+ monkeypatch.setattr(aws_module._module, "params", sentinel.RETURNED_PARAMS)
+
+ assert aws_module.params is sentinel.RETURNED_PARAMS
+
+
+@pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
+def test_debug(monkeypatch, stdin):
+ aws_module = utils_module.AnsibleAWSModule(argument_spec=dict())
+ monkeypatch.setattr(aws_module._module, "debug", warnings.warn)
+
+ with pytest.warns(UserWarning, match="My debug message"):
+ aws_module.debug("My debug message")
+
+
+@pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
+def test_warn(monkeypatch, stdin):
+ aws_module = utils_module.AnsibleAWSModule(argument_spec=dict())
+ monkeypatch.setattr(aws_module._module, "warn", warnings.warn)
+
+ with pytest.warns(UserWarning, match="My warning message"):
+ aws_module.warn("My warning message")
+
+
+@pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
+def test_deprecate(monkeypatch, stdin):
+ kwargs = {"example": sentinel.KWARG}
+ deprecate = MagicMock(name="deprecate")
+ deprecate.return_value = sentinel.RET_DEPRECATE
+
+ aws_module = utils_module.AnsibleAWSModule(argument_spec=dict())
+ monkeypatch.setattr(aws_module._module, "deprecate", deprecate)
+ assert aws_module.deprecate(sentinel.PARAM_DEPRECATE, **kwargs) is sentinel.RET_DEPRECATE
+ assert deprecate.call_args == call(sentinel.PARAM_DEPRECATE, **kwargs)
+
+
+@pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
+def test_gather_versions(monkeypatch, stdin):
+ gather_sdk_versions = MagicMock(name="gather_sdk_versions")
+ gather_sdk_versions.return_value = sentinel.RETURNED_SDK_VERSIONS
+ monkeypatch.setattr(utils_module, "gather_sdk_versions", gather_sdk_versions)
+ aws_module = utils_module.AnsibleAWSModule(argument_spec=dict())
+
+ assert aws_module._gather_versions() is sentinel.RETURNED_SDK_VERSIONS
+ assert gather_sdk_versions.call_args == call()
+
+
+@pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
+def test_region(monkeypatch, stdin):
+ get_aws_region = MagicMock(name="get_aws_region")
+ get_aws_region.return_value = sentinel.RETURNED_REGION
+ monkeypatch.setattr(utils_module, "get_aws_region", get_aws_region)
+ aws_module = utils_module.AnsibleAWSModule(argument_spec=dict())
+
+ assert aws_module.region is sentinel.RETURNED_REGION
+ assert get_aws_region.call_args == call(aws_module, True)
+
+
+@pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
+def test_boto3_at_least(monkeypatch, stdin):
+ boto3_at_least = MagicMock(name="boto3_at_least")
+ boto3_at_least.return_value = sentinel.RET_BOTO3_AT_LEAST
+ monkeypatch.setattr(utils_module, "boto3_at_least", boto3_at_least)
+
+ aws_module = utils_module.AnsibleAWSModule(argument_spec=dict())
+ assert aws_module.boto3_at_least(sentinel.PARAM_BOTO3) is sentinel.RET_BOTO3_AT_LEAST
+ assert boto3_at_least.call_args == call(sentinel.PARAM_BOTO3)
+
+
+@pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
+def test_botocore_at_least(monkeypatch, stdin):
+ botocore_at_least = MagicMock(name="botocore_at_least")
+ botocore_at_least.return_value = sentinel.RET_BOTOCORE_AT_LEAST
+ monkeypatch.setattr(utils_module, "botocore_at_least", botocore_at_least)
+
+ aws_module = utils_module.AnsibleAWSModule(argument_spec=dict())
+ assert aws_module.botocore_at_least(sentinel.PARAM_BOTOCORE) is sentinel.RET_BOTOCORE_AT_LEAST
+ assert botocore_at_least.call_args == call(sentinel.PARAM_BOTOCORE)
+
+
+@pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
+def test_boolean(monkeypatch, stdin):
+ boolean = MagicMock(name="boolean")
+ boolean.return_value = sentinel.RET_BOOLEAN
+
+ aws_module = utils_module.AnsibleAWSModule(argument_spec=dict())
+ monkeypatch.setattr(aws_module._module, "boolean", boolean)
+ assert aws_module.boolean(sentinel.PARAM_BOOLEAN) is sentinel.RET_BOOLEAN
+ assert boolean.call_args == call(sentinel.PARAM_BOOLEAN)
+
+
+@pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
+def test_md5(monkeypatch, stdin):
+ md5 = MagicMock(name="md5")
+ md5.return_value = sentinel.RET_MD5
+
+ aws_module = utils_module.AnsibleAWSModule(argument_spec=dict())
+ monkeypatch.setattr(aws_module._module, "md5", md5)
+ assert aws_module.md5(sentinel.PARAM_MD5) is sentinel.RET_MD5
+ assert md5.call_args == call(sentinel.PARAM_MD5)
+
+
+@pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
+def test_client_no_wrapper(monkeypatch, stdin):
+ get_aws_connection_info = MagicMock(name="get_aws_connection_info")
+ sentinel.CONN_ARGS = dict()
+ get_aws_connection_info.return_value = (sentinel.CONN_REGION, sentinel.CONN_URL, sentinel.CONN_ARGS)
+ monkeypatch.setattr(utils_module, "get_aws_connection_info", get_aws_connection_info)
+ boto3_conn = MagicMock(name="boto3_conn")
+ boto3_conn.return_value = sentinel.BOTO3_CONN
+ monkeypatch.setattr(utils_module, "boto3_conn", boto3_conn)
+
+ aws_module = utils_module.AnsibleAWSModule(argument_spec=dict())
+ assert aws_module.client(sentinel.PARAM_SERVICE) is sentinel.BOTO3_CONN
+ assert get_aws_connection_info.call_args == call(aws_module, boto3=True)
+ assert boto3_conn.call_args == call(
+ aws_module,
+ conn_type="client",
+ resource=sentinel.PARAM_SERVICE,
+ region=sentinel.CONN_REGION,
+ endpoint=sentinel.CONN_URL,
+ )
+
+
+@pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
+def test_client_wrapper(monkeypatch, stdin):
+ get_aws_connection_info = MagicMock(name="get_aws_connection_info")
+ sentinel.CONN_ARGS = dict()
+ get_aws_connection_info.return_value = (sentinel.CONN_REGION, sentinel.CONN_URL, sentinel.CONN_ARGS)
+ monkeypatch.setattr(utils_module, "get_aws_connection_info", get_aws_connection_info)
+ boto3_conn = MagicMock(name="boto3_conn")
+ boto3_conn.return_value = sentinel.BOTO3_CONN
+ monkeypatch.setattr(utils_module, "boto3_conn", boto3_conn)
+
+ aws_module = utils_module.AnsibleAWSModule(argument_spec=dict())
+ wrapped_conn = aws_module.client(sentinel.PARAM_SERVICE, sentinel.PARAM_WRAPPER)
+ assert wrapped_conn.client is sentinel.BOTO3_CONN
+ assert wrapped_conn.retry is sentinel.PARAM_WRAPPER
+ assert get_aws_connection_info.call_args == call(aws_module, boto3=True)
+ assert boto3_conn.call_args == call(
+ aws_module,
+ conn_type="client",
+ resource=sentinel.PARAM_SERVICE,
+ region=sentinel.CONN_REGION,
+ endpoint=sentinel.CONN_URL,
+ )
+
+ # Check that we can override parameters
+ wrapped_conn = aws_module.client(sentinel.PARAM_SERVICE, sentinel.PARAM_WRAPPER, region=sentinel.PARAM_REGION)
+ assert wrapped_conn.client is sentinel.BOTO3_CONN
+ assert wrapped_conn.retry is sentinel.PARAM_WRAPPER
+ assert get_aws_connection_info.call_args == call(aws_module, boto3=True)
+ assert boto3_conn.call_args == call(
+ aws_module,
+ conn_type="client",
+ resource=sentinel.PARAM_SERVICE,
+ region=sentinel.PARAM_REGION,
+ endpoint=sentinel.CONN_URL,
+ )
+
+
+@pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
+def test_resource(monkeypatch, stdin):
+ get_aws_connection_info = MagicMock(name="get_aws_connection_info")
+ sentinel.CONN_ARGS = dict()
+ get_aws_connection_info.return_value = (sentinel.CONN_REGION, sentinel.CONN_URL, sentinel.CONN_ARGS)
+ monkeypatch.setattr(utils_module, "get_aws_connection_info", get_aws_connection_info)
+ boto3_conn = MagicMock(name="boto3_conn")
+ boto3_conn.return_value = sentinel.BOTO3_CONN
+ monkeypatch.setattr(utils_module, "boto3_conn", boto3_conn)
+
+ aws_module = utils_module.AnsibleAWSModule(argument_spec=dict())
+ assert aws_module.resource(sentinel.PARAM_SERVICE) is sentinel.BOTO3_CONN
+ assert get_aws_connection_info.call_args == call(aws_module, boto3=True)
+ assert boto3_conn.call_args == call(
+ aws_module,
+ conn_type="resource",
+ resource=sentinel.PARAM_SERVICE,
+ region=sentinel.CONN_REGION,
+ endpoint=sentinel.CONN_URL,
+ )
+
+ # Check that we can override parameters
+ assert aws_module.resource(sentinel.PARAM_SERVICE, region=sentinel.PARAM_REGION) is sentinel.BOTO3_CONN
+ assert get_aws_connection_info.call_args == call(aws_module, boto3=True)
+ assert boto3_conn.call_args == call(
+ aws_module,
+ conn_type="resource",
+ resource=sentinel.PARAM_SERVICE,
+ region=sentinel.PARAM_REGION,
+ endpoint=sentinel.CONN_URL,
+ )
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_require_at_least.py b/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_require_at_least.py
index adf2bf558..c383a4267 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_require_at_least.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/modules/ansible_aws_module/test_require_at_least.py
@@ -3,15 +3,13 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import json
+
import pytest
try:
- import botocore
import boto3
+ import botocore
except ImportError:
# Handled by HAS_BOTO3
pass
@@ -19,32 +17,32 @@ except ImportError:
from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
from ansible_collections.amazon.aws.plugins.module_utils.modules import AnsibleAWSModule
-DUMMY_VERSION = '5.5.5.5'
+DUMMY_VERSION = "5.5.5.5"
TEST_VERSIONS = [
- ['1.1.1', '2.2.2', True],
- ['1.1.1', '0.0.1', False],
- ['9.9.9', '9.9.9', True],
- ['9.9.9', '9.9.10', True],
- ['9.9.9', '9.10.9', True],
- ['9.9.9', '10.9.9', True],
- ['9.9.9', '9.9.8', False],
- ['9.9.9', '9.8.9', False],
- ['9.9.9', '8.9.9', False],
- ['10.10.10', '10.10.10', True],
- ['10.10.10', '10.10.11', True],
- ['10.10.10', '10.11.10', True],
- ['10.10.10', '11.10.10', True],
- ['10.10.10', '10.10.9', False],
- ['10.10.10', '10.9.10', False],
- ['10.10.10', '9.19.10', False],
+ ["1.1.1", "2.2.2", True],
+ ["1.1.1", "0.0.1", False],
+ ["9.9.9", "9.9.9", True],
+ ["9.9.9", "9.9.10", True],
+ ["9.9.9", "9.10.9", True],
+ ["9.9.9", "10.9.9", True],
+ ["9.9.9", "9.9.8", False],
+ ["9.9.9", "9.8.9", False],
+ ["9.9.9", "8.9.9", False],
+ ["10.10.10", "10.10.10", True],
+ ["10.10.10", "10.10.11", True],
+ ["10.10.10", "10.11.10", True],
+ ["10.10.10", "11.10.10", True],
+ ["10.10.10", "10.10.9", False],
+ ["10.10.10", "10.9.10", False],
+ ["10.10.10", "9.19.10", False],
]
if not HAS_BOTO3:
pytestmark = pytest.mark.skip("test_require_at_least.py requires the python modules 'boto3' and 'botocore'")
-class TestRequireAtLeastTestSuite(object):
+class TestRequireAtLeastTestSuite:
# ========================================================
# Prepare some data for use in our testing
# ========================================================
@@ -54,7 +52,9 @@ class TestRequireAtLeastTestSuite(object):
# ========================================================
# Test botocore_at_least
# ========================================================
- @pytest.mark.parametrize("stdin, desired_version, compare_version, at_least", [({}, *d) for d in TEST_VERSIONS], indirect=["stdin"])
+ @pytest.mark.parametrize(
+ "stdin, desired_version, compare_version, at_least", [({}, *d) for d in TEST_VERSIONS], indirect=["stdin"]
+ )
def test_botocore_at_least(self, monkeypatch, stdin, desired_version, compare_version, at_least, capfd):
monkeypatch.setattr(botocore, "__version__", compare_version)
# Set boto3 version to a known value (tests are on both sides) to make
@@ -69,7 +69,9 @@ class TestRequireAtLeastTestSuite(object):
# ========================================================
# Test boto3_at_least
# ========================================================
- @pytest.mark.parametrize("stdin, desired_version, compare_version, at_least", [({}, *d) for d in TEST_VERSIONS], indirect=["stdin"])
+ @pytest.mark.parametrize(
+ "stdin, desired_version, compare_version, at_least", [({}, *d) for d in TEST_VERSIONS], indirect=["stdin"]
+ )
def test_boto3_at_least(self, monkeypatch, stdin, desired_version, compare_version, at_least, capfd):
# Set botocore version to a known value (tests are on both sides) to make
# sure we're comparing the right library
@@ -84,7 +86,9 @@ class TestRequireAtLeastTestSuite(object):
# ========================================================
# Test require_botocore_at_least
# ========================================================
- @pytest.mark.parametrize("stdin, desired_version, compare_version, at_least", [({}, *d) for d in TEST_VERSIONS], indirect=["stdin"])
+ @pytest.mark.parametrize(
+ "stdin, desired_version, compare_version, at_least", [({}, *d) for d in TEST_VERSIONS], indirect=["stdin"]
+ )
def test_require_botocore_at_least(self, monkeypatch, stdin, desired_version, compare_version, at_least, capfd):
monkeypatch.setattr(botocore, "__version__", compare_version)
# Set boto3 version to a known value (tests are on both sides) to make
@@ -117,7 +121,9 @@ class TestRequireAtLeastTestSuite(object):
# ========================================================
# Test require_boto3_at_least
# ========================================================
- @pytest.mark.parametrize("stdin, desired_version, compare_version, at_least", [({}, *d) for d in TEST_VERSIONS], indirect=["stdin"])
+ @pytest.mark.parametrize(
+ "stdin, desired_version, compare_version, at_least", [({}, *d) for d in TEST_VERSIONS], indirect=["stdin"]
+ )
def test_require_boto3_at_least(self, monkeypatch, stdin, desired_version, compare_version, at_least, capfd):
monkeypatch.setattr(botocore, "__version__", DUMMY_VERSION)
# Set boto3 version to a known value (tests are on both sides) to make
@@ -150,14 +156,18 @@ class TestRequireAtLeastTestSuite(object):
# ========================================================
# Test require_botocore_at_least with reason
# ========================================================
- @pytest.mark.parametrize("stdin, desired_version, compare_version, at_least", [({}, *d) for d in TEST_VERSIONS], indirect=["stdin"])
- def test_require_botocore_at_least_with_reason(self, monkeypatch, stdin, desired_version, compare_version, at_least, capfd):
+ @pytest.mark.parametrize(
+ "stdin, desired_version, compare_version, at_least", [({}, *d) for d in TEST_VERSIONS], indirect=["stdin"]
+ )
+ def test_require_botocore_at_least_with_reason(
+ self, monkeypatch, stdin, desired_version, compare_version, at_least, capfd
+ ):
monkeypatch.setattr(botocore, "__version__", compare_version)
# Set boto3 version to a known value (tests are on both sides) to make
# sure we're comparing the right library
monkeypatch.setattr(boto3, "__version__", DUMMY_VERSION)
- reason = 'testing in progress'
+ reason = "testing in progress"
# Create a minimal module that we can call
module = AnsibleAWSModule(argument_spec=dict())
@@ -178,7 +188,7 @@ class TestRequireAtLeastTestSuite(object):
# The message is generated by Ansible, don't test for an exact
# message
assert desired_version in return_val.get("msg")
- assert " {0}".format(reason) in return_val.get("msg")
+ assert f" {reason}" in return_val.get("msg")
assert "botocore" in return_val.get("msg")
assert return_val.get("boto3_version") == DUMMY_VERSION
assert return_val.get("botocore_version") == compare_version
@@ -186,14 +196,18 @@ class TestRequireAtLeastTestSuite(object):
# ========================================================
# Test require_boto3_at_least with reason
# ========================================================
- @pytest.mark.parametrize("stdin, desired_version, compare_version, at_least", [({}, *d) for d in TEST_VERSIONS], indirect=["stdin"])
- def test_require_boto3_at_least_with_reason(self, monkeypatch, stdin, desired_version, compare_version, at_least, capfd):
+ @pytest.mark.parametrize(
+ "stdin, desired_version, compare_version, at_least", [({}, *d) for d in TEST_VERSIONS], indirect=["stdin"]
+ )
+ def test_require_boto3_at_least_with_reason(
+ self, monkeypatch, stdin, desired_version, compare_version, at_least, capfd
+ ):
monkeypatch.setattr(botocore, "__version__", DUMMY_VERSION)
# Set boto3 version to a known value (tests are on both sides) to make
# sure we're comparing the right library
monkeypatch.setattr(boto3, "__version__", compare_version)
- reason = 'testing in progress'
+ reason = "testing in progress"
# Create a minimal module that we can call
module = AnsibleAWSModule(argument_spec=dict())
@@ -214,7 +228,7 @@ class TestRequireAtLeastTestSuite(object):
# The message is generated by Ansible, don't test for an exact
# message
assert desired_version in return_val.get("msg")
- assert " {0}".format(reason) in return_val.get("msg")
+ assert f" {reason}" in return_val.get("msg")
assert "boto3" in return_val.get("msg")
assert return_val.get("botocore_version") == DUMMY_VERSION
assert return_val.get("boto3_version") == compare_version
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/policy/__init__.py b/ansible_collections/amazon/aws/tests/unit/module_utils/policy/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/policy/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_canonicalize.py b/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_canonicalize.py
new file mode 100644
index 000000000..120649828
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_canonicalize.py
@@ -0,0 +1,38 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import sentinel
+
+from ansible_collections.amazon.aws.plugins.module_utils.policy import _canonify_policy_dict_item
+from ansible_collections.amazon.aws.plugins.module_utils.policy import _canonify_root_arn
+from ansible_collections.amazon.aws.plugins.module_utils.policy import _tuplify_list
+
+
+def test_tuplify_list():
+ my_list = ["one", 2, sentinel.list_item, False]
+ # Lists are tuplified
+ assert _tuplify_list(my_list) == tuple(my_list)
+ # Other types are not
+ assert _tuplify_list("one") == "one"
+ assert _tuplify_list(2) == 2
+ assert _tuplify_list(sentinel.single_item) is sentinel.single_item
+ assert _tuplify_list(False) is False
+
+
+def test_canonify_root_arn():
+ assert _canonify_root_arn("Some String") == "Some String"
+ assert _canonify_root_arn("123456789012") == "123456789012"
+ assert _canonify_root_arn("arn:aws:iam::123456789012:root") == "123456789012"
+
+
+def test_canonify_policy_dict_item_principal():
+ assert _canonify_policy_dict_item("*", "Principal") == {"AWS": "*"}
+ assert _canonify_policy_dict_item("*", "NotPrincipal") == {"AWS": "*"}
+ assert _canonify_policy_dict_item("*", "AnotherKey") == "*"
+ assert _canonify_policy_dict_item("NotWildCard", "Principal") == "NotWildCard"
+ assert _canonify_policy_dict_item("NotWildCard", "NotPrincipal") == "NotWildCard"
+ assert _canonify_policy_dict_item(sentinel.single_item, "Principal") is sentinel.single_item
+ assert _canonify_policy_dict_item(False, "Principal") is False
+ assert _canonify_policy_dict_item(True, "Principal") is True
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_compare_policies.py b/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_compare_policies.py
index eb6de22db..4f9d86ac3 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_compare_policies.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_compare_policies.py
@@ -3,14 +3,10 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
from ansible_collections.amazon.aws.plugins.module_utils.policy import compare_policies
-class TestComparePolicy():
-
+class TestComparePolicy:
# ========================================================
# Setup some initial data that we can use within our tests
# ========================================================
@@ -18,130 +14,132 @@ class TestComparePolicy():
# A pair of simple IAM Trust relationships using bools, the first a
# native bool the second a quoted string
self.bool_policy_bool = {
- 'Version': '2012-10-17',
- 'Statement': [
+ "Version": "2012-10-17",
+ "Statement": [
{
"Action": "sts:AssumeRole",
- "Condition": {
- "Bool": {"aws:MultiFactorAuthPresent": True}
- },
+ "Condition": {"Bool": {"aws:MultiFactorAuthPresent": True}},
"Effect": "Allow",
"Principal": {"AWS": "arn:aws:iam::XXXXXXXXXXXX:root"},
- "Sid": "AssumeRoleWithBoolean"
+ "Sid": "AssumeRoleWithBoolean",
}
- ]
+ ],
}
self.bool_policy_string = {
- 'Version': '2012-10-17',
- 'Statement': [
+ "Version": "2012-10-17",
+ "Statement": [
{
"Action": "sts:AssumeRole",
- "Condition": {
- "Bool": {"aws:MultiFactorAuthPresent": "true"}
- },
+ "Condition": {"Bool": {"aws:MultiFactorAuthPresent": "true"}},
"Effect": "Allow",
"Principal": {"AWS": "arn:aws:iam::XXXXXXXXXXXX:root"},
- "Sid": "AssumeRoleWithBoolean"
+ "Sid": "AssumeRoleWithBoolean",
}
- ]
+ ],
}
# A pair of simple bucket policies using numbers, the first a
# native int the second a quoted string
self.numeric_policy_number = {
- 'Version': '2012-10-17',
- 'Statement': [
+ "Version": "2012-10-17",
+ "Statement": [
{
"Action": "s3:ListBucket",
- "Condition": {
- "NumericLessThanEquals": {"s3:max-keys": 15}
- },
+ "Condition": {"NumericLessThanEquals": {"s3:max-keys": 15}},
"Effect": "Allow",
"Resource": "arn:aws:s3:::examplebucket",
- "Sid": "s3ListBucketWithNumericLimit"
+ "Sid": "s3ListBucketWithNumericLimit",
}
- ]
+ ],
}
self.numeric_policy_string = {
- 'Version': '2012-10-17',
- 'Statement': [
+ "Version": "2012-10-17",
+ "Statement": [
{
"Action": "s3:ListBucket",
- "Condition": {
- "NumericLessThanEquals": {"s3:max-keys": "15"}
- },
+ "Condition": {"NumericLessThanEquals": {"s3:max-keys": "15"}},
"Effect": "Allow",
"Resource": "arn:aws:s3:::examplebucket",
- "Sid": "s3ListBucketWithNumericLimit"
+ "Sid": "s3ListBucketWithNumericLimit",
}
- ]
+ ],
}
self.small_policy_one = {
- 'Version': '2012-10-17',
- 'Statement': [
+ "Version": "2012-10-17",
+ "Statement": [
{
- 'Action': 's3:PutObjectAcl',
- 'Sid': 'AddCannedAcl2',
- 'Resource': 'arn:aws:s3:::test_policy/*',
- 'Effect': 'Allow',
- 'Principal': {'AWS': ['arn:aws:iam::XXXXXXXXXXXX:user/username1', 'arn:aws:iam::XXXXXXXXXXXX:user/username2']}
+ "Action": "s3:PutObjectAcl",
+ "Sid": "AddCannedAcl2",
+ "Resource": "arn:aws:s3:::test_policy/*",
+ "Effect": "Allow",
+ "Principal": {
+ "AWS": ["arn:aws:iam::XXXXXXXXXXXX:user/username1", "arn:aws:iam::XXXXXXXXXXXX:user/username2"]
+ },
}
- ]
+ ],
}
# The same as small_policy_one, except the single resource is in a list and the contents of Statement are jumbled
self.small_policy_two = {
- 'Version': '2012-10-17',
- 'Statement': [
+ "Version": "2012-10-17",
+ "Statement": [
{
- 'Effect': 'Allow',
- 'Action': 's3:PutObjectAcl',
- 'Principal': {'AWS': ['arn:aws:iam::XXXXXXXXXXXX:user/username1', 'arn:aws:iam::XXXXXXXXXXXX:user/username2']},
- 'Resource': ['arn:aws:s3:::test_policy/*'],
- 'Sid': 'AddCannedAcl2'
+ "Effect": "Allow",
+ "Action": "s3:PutObjectAcl",
+ "Principal": {
+ "AWS": ["arn:aws:iam::XXXXXXXXXXXX:user/username1", "arn:aws:iam::XXXXXXXXXXXX:user/username2"]
+ },
+ "Resource": ["arn:aws:s3:::test_policy/*"],
+ "Sid": "AddCannedAcl2",
}
- ]
+ ],
}
self.version_policy_missing = {
- 'Statement': [
+ "Statement": [
{
- 'Action': 's3:PutObjectAcl',
- 'Sid': 'AddCannedAcl2',
- 'Resource': 'arn:aws:s3:::test_policy/*',
- 'Effect': 'Allow',
- 'Principal': {'AWS': ['arn:aws:iam::XXXXXXXXXXXX:user/username1', 'arn:aws:iam::XXXXXXXXXXXX:user/username2']}
+ "Action": "s3:PutObjectAcl",
+ "Sid": "AddCannedAcl2",
+ "Resource": "arn:aws:s3:::test_policy/*",
+ "Effect": "Allow",
+ "Principal": {
+ "AWS": ["arn:aws:iam::XXXXXXXXXXXX:user/username1", "arn:aws:iam::XXXXXXXXXXXX:user/username2"]
+ },
}
]
}
self.version_policy_old = {
- 'Version': '2008-10-17',
- 'Statement': [
+ "Version": "2008-10-17",
+ "Statement": [
{
- 'Action': 's3:PutObjectAcl',
- 'Sid': 'AddCannedAcl2',
- 'Resource': 'arn:aws:s3:::test_policy/*',
- 'Effect': 'Allow',
- 'Principal': {'AWS': ['arn:aws:iam::XXXXXXXXXXXX:user/username1', 'arn:aws:iam::XXXXXXXXXXXX:user/username2']}
+ "Action": "s3:PutObjectAcl",
+ "Sid": "AddCannedAcl2",
+ "Resource": "arn:aws:s3:::test_policy/*",
+ "Effect": "Allow",
+ "Principal": {
+ "AWS": ["arn:aws:iam::XXXXXXXXXXXX:user/username1", "arn:aws:iam::XXXXXXXXXXXX:user/username2"]
+ },
}
- ]
+ ],
}
self.version_policy_new = {
- 'Version': '2012-10-17',
- 'Statement': [
+ "Version": "2012-10-17",
+ "Statement": [
{
- 'Action': 's3:PutObjectAcl',
- 'Sid': 'AddCannedAcl2',
- 'Resource': 'arn:aws:s3:::test_policy/*',
- 'Effect': 'Allow',
- 'Principal': {'AWS': ['arn:aws:iam::XXXXXXXXXXXX:user/username1', 'arn:aws:iam::XXXXXXXXXXXX:user/username2']}
+ "Action": "s3:PutObjectAcl",
+ "Sid": "AddCannedAcl2",
+ "Resource": "arn:aws:s3:::test_policy/*",
+ "Effect": "Allow",
+ "Principal": {
+ "AWS": ["arn:aws:iam::XXXXXXXXXXXX:user/username1", "arn:aws:iam::XXXXXXXXXXXX:user/username2"]
+ },
}
- ]
+ ],
}
self.larger_policy_one = {
@@ -151,26 +149,18 @@ class TestComparePolicy():
"Sid": "Test",
"Effect": "Allow",
"Principal": {
- "AWS": [
- "arn:aws:iam::XXXXXXXXXXXX:user/testuser1",
- "arn:aws:iam::XXXXXXXXXXXX:user/testuser2"
- ]
+ "AWS": ["arn:aws:iam::XXXXXXXXXXXX:user/testuser1", "arn:aws:iam::XXXXXXXXXXXX:user/testuser2"]
},
"Action": "s3:PutObjectAcl",
- "Resource": "arn:aws:s3:::test_policy/*"
+ "Resource": "arn:aws:s3:::test_policy/*",
},
{
"Effect": "Allow",
- "Principal": {
- "AWS": "arn:aws:iam::XXXXXXXXXXXX:user/testuser2"
- },
- "Action": [
- "s3:PutObject",
- "s3:PutObjectAcl"
- ],
- "Resource": "arn:aws:s3:::test_policy/*"
- }
- ]
+ "Principal": {"AWS": "arn:aws:iam::XXXXXXXXXXXX:user/testuser2"},
+ "Action": ["s3:PutObject", "s3:PutObjectAcl"],
+ "Resource": "arn:aws:s3:::test_policy/*",
+ },
+ ],
}
# The same as larger_policy_one, except having a list of length 1 and jumbled contents
@@ -178,29 +168,21 @@ class TestComparePolicy():
"Version": "2012-10-17",
"Statement": [
{
- "Principal": {
- "AWS": ["arn:aws:iam::XXXXXXXXXXXX:user/testuser2"]
- },
+ "Principal": {"AWS": ["arn:aws:iam::XXXXXXXXXXXX:user/testuser2"]},
"Effect": "Allow",
"Resource": "arn:aws:s3:::test_policy/*",
- "Action": [
- "s3:PutObject",
- "s3:PutObjectAcl"
- ]
+ "Action": ["s3:PutObject", "s3:PutObjectAcl"],
},
{
"Action": "s3:PutObjectAcl",
"Principal": {
- "AWS": [
- "arn:aws:iam::XXXXXXXXXXXX:user/testuser1",
- "arn:aws:iam::XXXXXXXXXXXX:user/testuser2"
- ]
+ "AWS": ["arn:aws:iam::XXXXXXXXXXXX:user/testuser1", "arn:aws:iam::XXXXXXXXXXXX:user/testuser2"]
},
"Sid": "Test",
"Resource": "arn:aws:s3:::test_policy/*",
- "Effect": "Allow"
- }
- ]
+ "Effect": "Allow",
+ },
+ ],
}
# Different than larger_policy_two: a different principal is given
@@ -208,28 +190,21 @@ class TestComparePolicy():
"Version": "2012-10-17",
"Statement": [
{
- "Principal": {
- "AWS": ["arn:aws:iam::XXXXXXXXXXXX:user/testuser2"]
- },
+ "Principal": {"AWS": ["arn:aws:iam::XXXXXXXXXXXX:user/testuser2"]},
"Effect": "Allow",
"Resource": "arn:aws:s3:::test_policy/*",
- "Action": [
- "s3:PutObject",
- "s3:PutObjectAcl"]
+ "Action": ["s3:PutObject", "s3:PutObjectAcl"],
},
{
"Action": "s3:PutObjectAcl",
"Principal": {
- "AWS": [
- "arn:aws:iam::XXXXXXXXXXXX:user/testuser1",
- "arn:aws:iam::XXXXXXXXXXXX:user/testuser3"
- ]
+ "AWS": ["arn:aws:iam::XXXXXXXXXXXX:user/testuser1", "arn:aws:iam::XXXXXXXXXXXX:user/testuser3"]
},
"Sid": "Test",
"Resource": "arn:aws:s3:::test_policy/*",
- "Effect": "Allow"
- }
- ]
+ "Effect": "Allow",
+ },
+ ],
}
# Minimal policy using wildcarded Principal
@@ -237,16 +212,12 @@ class TestComparePolicy():
"Version": "2012-10-17",
"Statement": [
{
- "Principal": {
- "AWS": ["*"]
- },
+ "Principal": {"AWS": ["*"]},
"Effect": "Allow",
"Resource": "arn:aws:s3:::test_policy/*",
- "Action": [
- "s3:PutObject",
- "s3:PutObjectAcl"]
+ "Action": ["s3:PutObject", "s3:PutObjectAcl"],
}
- ]
+ ],
}
# Minimal policy using wildcarded Principal
@@ -257,11 +228,9 @@ class TestComparePolicy():
"Principal": "*",
"Effect": "Allow",
"Resource": "arn:aws:s3:::test_policy/*",
- "Action": [
- "s3:PutObject",
- "s3:PutObjectAcl"]
+ "Action": ["s3:PutObject", "s3:PutObjectAcl"],
}
- ]
+ ],
}
# ========================================================
@@ -269,71 +238,82 @@ class TestComparePolicy():
# ========================================================
def test_compare_small_policies_without_differences(self):
- """ Testing two small policies which are identical except for:
- * The contents of the statement are in different orders
- * The second policy contains a list of length one whereas in the first it is a string
+ """Testing two small policies which are identical except for:
+ * The contents of the statement are in different orders
+ * The second policy contains a list of length one whereas in the first it is a string
"""
assert compare_policies(self.small_policy_one, self.small_policy_two) is False
def test_compare_large_policies_without_differences(self):
- """ Testing two larger policies which are identical except for:
- * The statements are in different orders
- * The contents of the statements are also in different orders
- * The second contains a list of length one for the Principal whereas in the first it is a string
+ """Testing two larger policies which are identical except for:
+ * The statements are in different orders
+ * The contents of the statements are also in different orders
+ * The second contains a list of length one for the Principal whereas in the first it is a string
"""
assert compare_policies(self.larger_policy_one, self.larger_policy_two) is False
def test_compare_larger_policies_with_difference(self):
- """ Testing two larger policies which are identical except for:
- * one different principal
+ """Testing two larger policies which are identical except for:
+ * one different principal
"""
assert compare_policies(self.larger_policy_two, self.larger_policy_three) is True
def test_compare_smaller_policy_with_larger(self):
- """ Testing two policies of different sizes """
+ """Testing two policies of different sizes"""
assert compare_policies(self.larger_policy_one, self.small_policy_one) is True
def test_compare_boolean_policy_bool_and_string_are_equal(self):
- """ Testing two policies one using a quoted boolean, the other a bool """
+ """Testing two policies one using a quoted boolean, the other a bool"""
assert compare_policies(self.bool_policy_string, self.bool_policy_bool) is False
def test_compare_numeric_policy_number_and_string_are_equal(self):
- """ Testing two policies one using a quoted number, the other an int """
+ """Testing two policies one using a quoted number, the other an int"""
assert compare_policies(self.numeric_policy_string, self.numeric_policy_number) is False
def test_compare_version_policies_defaults_old(self):
- """ Testing that a policy without Version is considered identical to one
+ """Testing that a policy without Version is considered identical to one
with the 'old' Version (by default)
"""
assert compare_policies(self.version_policy_old, self.version_policy_missing) is False
assert compare_policies(self.version_policy_new, self.version_policy_missing) is True
def test_compare_version_policies_default_disabled(self):
- """ Testing that a policy without Version not considered identical when default_version=None
- """
+ """Testing that a policy without Version not considered identical when default_version=None"""
assert compare_policies(self.version_policy_missing, self.version_policy_missing, default_version=None) is False
assert compare_policies(self.version_policy_old, self.version_policy_missing, default_version=None) is True
assert compare_policies(self.version_policy_new, self.version_policy_missing, default_version=None) is True
def test_compare_version_policies_default_set(self):
- """ Testing that a policy without Version is only considered identical
+ """Testing that a policy without Version is only considered identical
when default_version="2008-10-17"
"""
- assert compare_policies(self.version_policy_missing, self.version_policy_missing, default_version="2012-10-17") is False
- assert compare_policies(self.version_policy_old, self.version_policy_missing, default_version="2012-10-17") is True
- assert compare_policies(self.version_policy_old, self.version_policy_missing, default_version="2008-10-17") is False
- assert compare_policies(self.version_policy_new, self.version_policy_missing, default_version="2012-10-17") is False
- assert compare_policies(self.version_policy_new, self.version_policy_missing, default_version="2008-10-17") is True
+ assert (
+ compare_policies(self.version_policy_missing, self.version_policy_missing, default_version="2012-10-17")
+ is False
+ )
+ assert (
+ compare_policies(self.version_policy_old, self.version_policy_missing, default_version="2012-10-17") is True
+ )
+ assert (
+ compare_policies(self.version_policy_old, self.version_policy_missing, default_version="2008-10-17")
+ is False
+ )
+ assert (
+ compare_policies(self.version_policy_new, self.version_policy_missing, default_version="2012-10-17")
+ is False
+ )
+ assert (
+ compare_policies(self.version_policy_new, self.version_policy_missing, default_version="2008-10-17") is True
+ )
def test_compare_version_policies_with_none(self):
- """ Testing that comparing with no policy works
- """
+ """Testing that comparing with no policy works"""
assert compare_policies(self.small_policy_one, None) is True
assert compare_policies(None, self.small_policy_one) is True
assert compare_policies(None, None) is False
def test_compare_wildcard_policies_without_differences(self):
- """ Testing two small wildcard policies which are identical except for:
- * Principal: "*" vs Principal: ["AWS": "*"]
+ """Testing two small wildcard policies which are identical except for:
+ * Principal: "*" vs Principal: ["AWS": "*"]
"""
assert compare_policies(self.wildcard_policy_one, self.wildcard_policy_two) is False
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_py3cmp.py b/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_py3cmp.py
new file mode 100644
index 000000000..3d9711ac9
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_py3cmp.py
@@ -0,0 +1,40 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+import pytest
+
+from ansible_collections.amazon.aws.plugins.module_utils.policy import _py3cmp
+
+
+def test_py3cmp_simple():
+ assert _py3cmp(1, 1) == 0
+ assert _py3cmp(1, 2) == -1
+ assert _py3cmp(2, 1) == 1
+ assert _py3cmp("1", "1") == 0
+ assert _py3cmp("1", "2") == -1
+ assert _py3cmp("2", "1") == 1
+ assert _py3cmp("a", "a") == 0
+ assert _py3cmp("a", "b") == -1
+ assert _py3cmp("b", "a") == 1
+ assert _py3cmp(("a",), ("a",)) == 0
+ assert _py3cmp(("a",), ("b",)) == -1
+ assert _py3cmp(("b",), ("a",)) == 1
+
+
+def test_py3cmp_mixed():
+ # Replicates the Python2 comparison behaviour of placing strings before tuples
+ assert _py3cmp(("a",), "a") == 1
+ assert _py3cmp("a", ("a",)) == -1
+
+ assert _py3cmp(("a",), "b") == 1
+ assert _py3cmp("b", ("a",)) == -1
+ assert _py3cmp(("b",), "a") == 1
+ assert _py3cmp("a", ("b",)) == -1
+
+ # intended for use by _hashable_policy, so expects either a string or a tuple
+ with pytest.raises(TypeError):
+ _py3cmp((1,), 1)
+ with pytest.raises(TypeError):
+ _py3cmp(1, (1,))
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_simple_hashable_policy.py b/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_simple_hashable_policy.py
new file mode 100644
index 000000000..0f8d07cc5
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_simple_hashable_policy.py
@@ -0,0 +1,28 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from ansible_collections.amazon.aws.plugins.module_utils.policy import _hashable_policy
+
+
+def test_hashable_policy_none():
+ assert _hashable_policy(None, []) == []
+
+
+def test_hashable_policy_boolean():
+ assert _hashable_policy(True, []) == ("true",)
+ assert _hashable_policy(False, []) == ("false",)
+
+
+def test_hashable_policy_int():
+ assert _hashable_policy(1, []) == ("1",)
+ assert _hashable_policy(42, []) == ("42",)
+ assert _hashable_policy(0, []) == ("0",)
+
+
+def test_hashable_policy_string():
+ assert _hashable_policy("simple_string", []) == ["simple_string"]
+ assert _hashable_policy("123456789012", []) == ["123456789012"]
+ # This is a special case, we generally expect to have gone via _canonify_root_arn
+ assert _hashable_policy("arn:aws:iam::123456789012:root", []) == ["123456789012"]
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_sort_json_policy_dict.py b/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_sort_json_policy_dict.py
new file mode 100644
index 000000000..8829f332c
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/policy/test_sort_json_policy_dict.py
@@ -0,0 +1,61 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from ansible_collections.amazon.aws.plugins.module_utils.policy import sort_json_policy_dict
+
+
+def test_nothing_to_sort():
+ simple_dict = {"key1": "a"}
+ nested_dict = {"key1": {"key2": "a"}}
+ very_nested_dict = {"key1": {"key2": {"key3": "a"}}}
+ assert sort_json_policy_dict(simple_dict) == simple_dict
+ assert sort_json_policy_dict(nested_dict) == nested_dict
+ assert sort_json_policy_dict(very_nested_dict) == very_nested_dict
+
+
+def test_basic_sort():
+ simple_dict = {"key1": [1, 2, 3, 4], "key2": [9, 8, 7, 6]}
+ sorted_dict = {"key1": [1, 2, 3, 4], "key2": [6, 7, 8, 9]}
+ assert sort_json_policy_dict(simple_dict) == sorted_dict
+ assert sort_json_policy_dict(sorted_dict) == sorted_dict
+ simple_dict = {"key1": ["a", "b", "c", "d"], "key2": ["z", "y", "x", "w"]}
+ sorted_dict = {"key1": ["a", "b", "c", "d"], "key2": ["w", "x", "y", "z"]}
+ assert sort_json_policy_dict(sorted_dict) == sorted_dict
+
+
+def test_nested_list_sort():
+ nested_dict = {"key1": {"key2": [9, 8, 7, 6]}}
+ sorted_dict = {"key1": {"key2": [6, 7, 8, 9]}}
+ assert sort_json_policy_dict(nested_dict) == sorted_dict
+ assert sort_json_policy_dict(sorted_dict) == sorted_dict
+ nested_dict = {"key1": {"key2": ["z", "y", "x", "w"]}}
+ sorted_dict = {"key1": {"key2": ["w", "x", "y", "z"]}}
+ assert sort_json_policy_dict(nested_dict) == sorted_dict
+ assert sort_json_policy_dict(sorted_dict) == sorted_dict
+
+
+def test_nested_dict_list_sort():
+ nested_dict = {"key1": {"key2": {"key3": [9, 8, 7, 6]}}}
+ sorted_dict = {"key1": {"key2": {"key3": [6, 7, 8, 9]}}}
+ assert sort_json_policy_dict(nested_dict) == sorted_dict
+ assert sort_json_policy_dict(sorted_dict) == sorted_dict
+ nested_dict = {"key1": {"key2": {"key3": ["z", "y", "x", "w"]}}}
+ sorted_dict = {"key1": {"key2": {"key3": ["w", "x", "y", "z"]}}}
+ assert sort_json_policy_dict(nested_dict) == sorted_dict
+ assert sort_json_policy_dict(sorted_dict) == sorted_dict
+
+
+def test_list_of_dict_sort():
+ nested_dict = {"key1": [{"key2": [4, 3, 2, 1]}, {"key3": [9, 8, 7, 6]}]}
+ sorted_dict = {"key1": [{"key2": [1, 2, 3, 4]}, {"key3": [6, 7, 8, 9]}]}
+ assert sort_json_policy_dict(nested_dict) == sorted_dict
+ assert sort_json_policy_dict(sorted_dict) == sorted_dict
+
+
+def test_list_of_list_sort():
+ nested_dict = {"key1": [[4, 3, 2, 1], [9, 8, 7, 6]]}
+ sorted_dict = {"key1": [[1, 2, 3, 4], [6, 7, 8, 9]]}
+ assert sort_json_policy_dict(nested_dict) == sorted_dict
+ assert sort_json_policy_dict(sorted_dict) == sorted_dict
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/retries/__init__.py b/ansible_collections/amazon/aws/tests/unit/module_utils/retries/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/retries/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/retries/test_awsretry.py b/ansible_collections/amazon/aws/tests/unit/module_utils/retries/test_awsretry.py
index e08700382..6141149ea 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/retries/test_awsretry.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/retries/test_awsretry.py
@@ -4,9 +4,6 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
try:
import botocore
except ImportError:
@@ -14,19 +11,18 @@ except ImportError:
import pytest
-from ansible_collections.amazon.aws.plugins.module_utils.retries import AWSRetry
from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
+from ansible_collections.amazon.aws.plugins.module_utils.retries import AWSRetry
if not HAS_BOTO3:
pytestmark = pytest.mark.skip("test_awsretry.py requires the python modules 'boto3' and 'botocore'")
-class TestAWSRetry():
-
+class TestAWSRetry:
def test_no_failures(self):
self.counter = 0
- @AWSRetry.backoff(tries=2, delay=0.1)
+ @AWSRetry.exponential_backoff(retries=2, delay=0.1)
def no_failures():
self.counter += 1
@@ -35,62 +31,62 @@ class TestAWSRetry():
def test_extend_boto3_failures(self):
self.counter = 0
- err_response = {'Error': {'Code': 'MalformedPolicyDocument'}}
+ err_response = {"Error": {"Code": "MalformedPolicyDocument"}}
- @AWSRetry.backoff(tries=2, delay=0.1, catch_extra_error_codes=['MalformedPolicyDocument'])
+ @AWSRetry.exponential_backoff(retries=2, delay=0.1, catch_extra_error_codes=["MalformedPolicyDocument"])
def extend_failures():
self.counter += 1
if self.counter < 2:
- raise botocore.exceptions.ClientError(err_response, 'You did something wrong.')
+ raise botocore.exceptions.ClientError(err_response, "You did something wrong.")
else:
- return 'success'
+ return "success"
result = extend_failures()
- assert result == 'success'
+ assert result == "success"
assert self.counter == 2
def test_retry_once(self):
self.counter = 0
- err_response = {'Error': {'Code': 'InternalFailure'}}
+ err_response = {"Error": {"Code": "InternalFailure"}}
- @AWSRetry.backoff(tries=2, delay=0.1)
+ @AWSRetry.exponential_backoff(retries=2, delay=0.1)
def retry_once():
self.counter += 1
if self.counter < 2:
- raise botocore.exceptions.ClientError(err_response, 'Something went wrong!')
+ raise botocore.exceptions.ClientError(err_response, "Something went wrong!")
else:
- return 'success'
+ return "success"
result = retry_once()
- assert result == 'success'
+ assert result == "success"
assert self.counter == 2
def test_reached_limit(self):
self.counter = 0
- err_response = {'Error': {'Code': 'RequestLimitExceeded'}}
+ err_response = {"Error": {"Code": "RequestLimitExceeded"}}
- @AWSRetry.backoff(tries=4, delay=0.1)
+ @AWSRetry.exponential_backoff(retries=4, delay=0.1)
def fail():
self.counter += 1
- raise botocore.exceptions.ClientError(err_response, 'toooo fast!!')
+ raise botocore.exceptions.ClientError(err_response, "toooo fast!!")
with pytest.raises(botocore.exceptions.ClientError) as context:
fail()
response = context.value.response
- assert response['Error']['Code'] == 'RequestLimitExceeded'
+ assert response["Error"]["Code"] == "RequestLimitExceeded"
assert self.counter == 4
def test_unexpected_exception_does_not_retry(self):
self.counter = 0
- err_response = {'Error': {'Code': 'AuthFailure'}}
+ err_response = {"Error": {"Code": "AuthFailure"}}
- @AWSRetry.backoff(tries=4, delay=0.1)
+ @AWSRetry.exponential_backoff(retries=4, delay=0.1)
def raise_unexpected_error():
self.counter += 1
- raise botocore.exceptions.ClientError(err_response, 'unexpected error')
+ raise botocore.exceptions.ClientError(err_response, "unexpected error")
with pytest.raises(botocore.exceptions.ClientError) as context:
raise_unexpected_error()
response = context.value.response
- assert response['Error']['Code'] == 'AuthFailure'
+ assert response["Error"]["Code"] == "AuthFailure"
assert self.counter == 1
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/retries/test_botocore_exception_maybe.py b/ansible_collections/amazon/aws/tests/unit/module_utils/retries/test_botocore_exception_maybe.py
new file mode 100644
index 000000000..758514750
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/retries/test_botocore_exception_maybe.py
@@ -0,0 +1,18 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+try:
+ import botocore
+except ImportError:
+ pass
+
+import ansible_collections.amazon.aws.plugins.module_utils.retries as util_retries
+
+
+def test_botocore_exception_maybe(monkeypatch):
+ none_type = type(None)
+ assert util_retries._botocore_exception_maybe() is botocore.exceptions.ClientError
+ monkeypatch.setattr(util_retries, "HAS_BOTO3", False)
+ assert util_retries._botocore_exception_maybe() is none_type
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/retries/test_retry_wrapper.py b/ansible_collections/amazon/aws/tests/unit/module_utils/retries/test_retry_wrapper.py
new file mode 100644
index 000000000..406e31826
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/retries/test_retry_wrapper.py
@@ -0,0 +1,267 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import sentinel
+
+import pytest
+
+try:
+ import botocore
+except ImportError:
+ pass
+
+import ansible_collections.amazon.aws.plugins.module_utils.botocore as util_botocore
+import ansible_collections.amazon.aws.plugins.module_utils.retries as util_retries
+
+
+@pytest.fixture
+def fake_client():
+ retryable_response = {"Error": {"Code": "RequestLimitExceeded", "Message": "Something went wrong"}}
+ retryable_exception = botocore.exceptions.ClientError(retryable_response, "fail_retryable")
+ not_retryable_response = {"Error": {"Code": "AnotherProblem", "Message": "Something went wrong"}}
+ not_retryable_exception = botocore.exceptions.ClientError(not_retryable_response, "fail_not_retryable")
+
+ client = MagicMock()
+
+ client.fail_retryable.side_effect = retryable_exception
+ client.fail_not_retryable.side_effect = not_retryable_exception
+ client.my_attribute = sentinel.ATTRIBUTE
+ client.successful.return_value = sentinel.RETURNED_SUCCESSFUL
+
+ return client
+
+
+@pytest.fixture
+def quick_backoff():
+    # Because RetryingBotoClientWrapper will wrap resources using this decorator,
+    # we rely on AWSRetry.jittered_backoff rather than trying to mock out a
+    # decorator.  We use a really short delay to keep the tests quick, and we
+    # only need to actually retry once.
+ retry = util_retries.AWSRetry.jittered_backoff(retries=2, delay=0.1)
+ return retry
+
+
+def test_retry_wrapper_non_callable(fake_client, quick_backoff):
+ wrapped_client = util_retries.RetryingBotoClientWrapper(fake_client, quick_backoff)
+
+ # non-callable's shouldn't be wrapped, we should just get them back
+ assert wrapped_client.my_attribute is sentinel.ATTRIBUTE
+
+
+def test_retry_wrapper_callable(fake_client, quick_backoff):
+ # Minimal test: not testing the aws_retry=True behaviour
+ # (In general) callables should be wrapped
+ wrapped_client = util_retries.RetryingBotoClientWrapper(fake_client, quick_backoff)
+
+ assert isinstance(fake_client.fail_retryable, MagicMock)
+ assert not isinstance(wrapped_client.fail_retryable, MagicMock)
+ assert callable(wrapped_client.fail_retryable)
+ with pytest.raises(botocore.exceptions.ClientError) as e:
+ wrapped_client.fail_retryable()
+ boto3_code = util_botocore.is_boto3_error_code("RequestLimitExceeded", e=e.value)
+ boto3_message = util_botocore.is_boto3_error_message("Something went wrong", e=e.value)
+ assert boto3_code is botocore.exceptions.ClientError
+ assert boto3_message is botocore.exceptions.ClientError
+ assert fake_client.fail_retryable.called
+ assert fake_client.fail_retryable.call_count == 1
+
+ assert isinstance(fake_client.fail_not_retryable, MagicMock)
+ assert not isinstance(wrapped_client.fail_not_retryable, MagicMock)
+ assert callable(wrapped_client.fail_not_retryable)
+ with pytest.raises(botocore.exceptions.ClientError) as e:
+ wrapped_client.fail_not_retryable()
+ boto3_code = util_botocore.is_boto3_error_code("AnotherProblem", e=e.value)
+ boto3_message = util_botocore.is_boto3_error_message("Something went wrong", e=e.value)
+ assert boto3_code is botocore.exceptions.ClientError
+ assert boto3_message is botocore.exceptions.ClientError
+ assert fake_client.fail_not_retryable.called
+ assert fake_client.fail_not_retryable.call_count == 1
+
+ assert isinstance(fake_client.successful, MagicMock)
+ assert not isinstance(wrapped_client.successful, MagicMock)
+ assert callable(fake_client.successful)
+ assert wrapped_client.successful() is sentinel.RETURNED_SUCCESSFUL
+ assert fake_client.successful.called
+ assert fake_client.successful.call_count == 1
+
+
+def test_retry_wrapper_never_wrap(fake_client, quick_backoff):
+ wrapped_client = util_retries.RetryingBotoClientWrapper(fake_client, quick_backoff)
+
+ assert isinstance(fake_client.get_paginator, MagicMock)
+ assert isinstance(wrapped_client.get_paginator, MagicMock)
+ assert wrapped_client.get_paginator is fake_client.get_paginator
+
+
+def test_retry_wrapper_no_retry_no_args(fake_client, quick_backoff):
+ # Minimal test: not testing the aws_retry=True behaviour
+ # (In general) callables should be wrapped
+ wrapped_client = util_retries.RetryingBotoClientWrapper(fake_client, quick_backoff)
+ call_args = call()
+
+ assert isinstance(fake_client.fail_retryable, MagicMock)
+ assert not isinstance(wrapped_client.fail_retryable, MagicMock)
+ assert callable(wrapped_client.fail_retryable)
+ with pytest.raises(botocore.exceptions.ClientError) as e:
+ wrapped_client.fail_retryable(aws_retry=False)
+ boto3_code = util_botocore.is_boto3_error_code("RequestLimitExceeded", e=e.value)
+ boto3_message = util_botocore.is_boto3_error_message("Something went wrong", e=e.value)
+ assert boto3_code is botocore.exceptions.ClientError
+ assert boto3_message is botocore.exceptions.ClientError
+ assert fake_client.fail_retryable.called
+ assert fake_client.fail_retryable.call_count == 1
+ assert fake_client.fail_retryable.call_args_list == [call_args]
+
+ assert isinstance(fake_client.fail_not_retryable, MagicMock)
+ assert not isinstance(wrapped_client.fail_not_retryable, MagicMock)
+ assert callable(wrapped_client.fail_not_retryable)
+ with pytest.raises(botocore.exceptions.ClientError) as e:
+ wrapped_client.fail_not_retryable(aws_retry=False)
+ boto3_code = util_botocore.is_boto3_error_code("AnotherProblem", e=e.value)
+ boto3_message = util_botocore.is_boto3_error_message("Something went wrong", e=e.value)
+ assert boto3_code is botocore.exceptions.ClientError
+ assert boto3_message is botocore.exceptions.ClientError
+ assert fake_client.fail_not_retryable.called
+ assert fake_client.fail_not_retryable.call_count == 1
+ assert fake_client.fail_not_retryable.call_args_list == [call_args]
+
+ assert isinstance(fake_client.successful, MagicMock)
+ assert not isinstance(wrapped_client.successful, MagicMock)
+ assert callable(fake_client.successful)
+ assert wrapped_client.successful(aws_retry=False) is sentinel.RETURNED_SUCCESSFUL
+ assert fake_client.successful.called
+ assert fake_client.successful.call_count == 1
+ assert fake_client.successful.call_args_list == [call_args]
+
+
+def test_retry_wrapper_retry_no_args(fake_client, quick_backoff):
+    # Tests the aws_retry=True behaviour with no call arguments: retryable
+    # failures are retried, non-retryable failures and successes pass through
+ wrapped_client = util_retries.RetryingBotoClientWrapper(fake_client, quick_backoff)
+ call_args = call()
+
+ assert isinstance(fake_client.fail_retryable, MagicMock)
+ assert not isinstance(wrapped_client.fail_retryable, MagicMock)
+ assert callable(wrapped_client.fail_retryable)
+ with pytest.raises(botocore.exceptions.ClientError) as e:
+ wrapped_client.fail_retryable(aws_retry=True)
+ boto3_code = util_botocore.is_boto3_error_code("RequestLimitExceeded", e=e.value)
+ boto3_message = util_botocore.is_boto3_error_message("Something went wrong", e=e.value)
+ assert boto3_code is botocore.exceptions.ClientError
+ assert boto3_message is botocore.exceptions.ClientError
+ assert fake_client.fail_retryable.called
+ assert fake_client.fail_retryable.call_count == 2
+ assert fake_client.fail_retryable.call_args_list == [call_args, call_args]
+
+ assert isinstance(fake_client.fail_not_retryable, MagicMock)
+ assert not isinstance(wrapped_client.fail_not_retryable, MagicMock)
+ assert callable(wrapped_client.fail_not_retryable)
+ with pytest.raises(botocore.exceptions.ClientError) as e:
+ wrapped_client.fail_not_retryable(aws_retry=True)
+ boto3_code = util_botocore.is_boto3_error_code("AnotherProblem", e=e.value)
+ boto3_message = util_botocore.is_boto3_error_message("Something went wrong", e=e.value)
+ assert boto3_code is botocore.exceptions.ClientError
+ assert boto3_message is botocore.exceptions.ClientError
+ assert fake_client.fail_not_retryable.called
+ assert fake_client.fail_not_retryable.call_count == 1
+ assert fake_client.fail_not_retryable.call_args_list == [call_args]
+
+ assert isinstance(fake_client.successful, MagicMock)
+ assert not isinstance(wrapped_client.successful, MagicMock)
+ assert callable(fake_client.successful)
+ assert wrapped_client.successful(aws_retry=True) is sentinel.RETURNED_SUCCESSFUL
+ assert fake_client.successful.called
+ assert fake_client.successful.call_count == 1
+ assert fake_client.successful.call_args_list == [call_args]
+
+
+def test_retry_wrapper_no_retry_args(fake_client, quick_backoff):
+ # Minimal test: not testing the aws_retry=True behaviour
+ # (In general) callables should be wrapped
+ wrapped_client = util_retries.RetryingBotoClientWrapper(fake_client, quick_backoff)
+ args = [sentinel.ARG_1, sentinel.ARG_2]
+ kwargs = {"kw1": sentinel.KWARG_1, "kw2": sentinel.KWARG_2}
+ # aws_retry=False shouldn't be passed to the 'wrapped' call
+ call_args = call(*args, **kwargs)
+
+ assert isinstance(fake_client.fail_retryable, MagicMock)
+ assert not isinstance(wrapped_client.fail_retryable, MagicMock)
+ assert callable(wrapped_client.fail_retryable)
+ with pytest.raises(botocore.exceptions.ClientError) as e:
+ wrapped_client.fail_retryable(*args, aws_retry=False, **kwargs)
+ boto3_code = util_botocore.is_boto3_error_code("RequestLimitExceeded", e=e.value)
+ boto3_message = util_botocore.is_boto3_error_message("Something went wrong", e=e.value)
+ assert boto3_code is botocore.exceptions.ClientError
+ assert boto3_message is botocore.exceptions.ClientError
+ assert fake_client.fail_retryable.called
+ assert fake_client.fail_retryable.call_count == 1
+ assert fake_client.fail_retryable.call_args_list == [call_args]
+
+ assert isinstance(fake_client.fail_not_retryable, MagicMock)
+ assert not isinstance(wrapped_client.fail_not_retryable, MagicMock)
+ assert callable(wrapped_client.fail_not_retryable)
+ with pytest.raises(botocore.exceptions.ClientError) as e:
+ wrapped_client.fail_not_retryable(*args, aws_retry=False, **kwargs)
+ boto3_code = util_botocore.is_boto3_error_code("AnotherProblem", e=e.value)
+ boto3_message = util_botocore.is_boto3_error_message("Something went wrong", e=e.value)
+ assert boto3_code is botocore.exceptions.ClientError
+ assert boto3_message is botocore.exceptions.ClientError
+ assert fake_client.fail_not_retryable.called
+ assert fake_client.fail_not_retryable.call_count == 1
+ assert fake_client.fail_not_retryable.call_args_list == [call_args]
+
+ assert isinstance(fake_client.successful, MagicMock)
+ assert not isinstance(wrapped_client.successful, MagicMock)
+ assert callable(fake_client.successful)
+ assert wrapped_client.successful(*args, aws_retry=False, **kwargs) is sentinel.RETURNED_SUCCESSFUL
+ assert fake_client.successful.called
+ assert fake_client.successful.call_count == 1
+ assert fake_client.successful.call_args_list == [call_args]
+
+
+def test_retry_wrapper_retry_args(fake_client, quick_backoff):
+    # Tests the aws_retry=True behaviour when positional and keyword arguments
+    # are passed through to the wrapped call
+ wrapped_client = util_retries.RetryingBotoClientWrapper(fake_client, quick_backoff)
+ args = [sentinel.ARG_1, sentinel.ARG_2]
+ kwargs = {"kw1": sentinel.KWARG_1, "kw2": sentinel.KWARG_2}
+ # aws_retry=True shouldn't be passed to the 'wrapped' call
+ call_args = call(*args, **kwargs)
+
+ assert isinstance(fake_client.fail_retryable, MagicMock)
+ assert not isinstance(wrapped_client.fail_retryable, MagicMock)
+ assert callable(wrapped_client.fail_retryable)
+ with pytest.raises(botocore.exceptions.ClientError) as e:
+ wrapped_client.fail_retryable(*args, aws_retry=True, **kwargs)
+ boto3_code = util_botocore.is_boto3_error_code("RequestLimitExceeded", e=e.value)
+ boto3_message = util_botocore.is_boto3_error_message("Something went wrong", e=e.value)
+ assert boto3_code is botocore.exceptions.ClientError
+ assert boto3_message is botocore.exceptions.ClientError
+ assert fake_client.fail_retryable.called
+ assert fake_client.fail_retryable.call_count == 2
+ assert fake_client.fail_retryable.call_args_list == [call_args, call_args]
+
+ assert isinstance(fake_client.fail_not_retryable, MagicMock)
+ assert not isinstance(wrapped_client.fail_not_retryable, MagicMock)
+ assert callable(wrapped_client.fail_not_retryable)
+ with pytest.raises(botocore.exceptions.ClientError) as e:
+ wrapped_client.fail_not_retryable(*args, aws_retry=True, **kwargs)
+ boto3_code = util_botocore.is_boto3_error_code("AnotherProblem", e=e.value)
+ boto3_message = util_botocore.is_boto3_error_message("Something went wrong", e=e.value)
+ assert boto3_code is botocore.exceptions.ClientError
+ assert boto3_message is botocore.exceptions.ClientError
+ assert fake_client.fail_not_retryable.called
+ assert fake_client.fail_not_retryable.call_count == 1
+ assert fake_client.fail_not_retryable.call_args_list == [call_args]
+
+ assert isinstance(fake_client.successful, MagicMock)
+ assert not isinstance(wrapped_client.successful, MagicMock)
+ assert callable(fake_client.successful)
+ assert wrapped_client.successful(*args, aws_retry=True, **kwargs) is sentinel.RETURNED_SUCCESSFUL
+ assert fake_client.successful.called
+ assert fake_client.successful.call_count == 1
+ assert fake_client.successful.call_args_list == [call_args]
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/test_acm.py b/ansible_collections/amazon/aws/tests/unit/module_utils/test_acm.py
new file mode 100644
index 000000000..e3b49d146
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/test_acm.py
@@ -0,0 +1,348 @@
+#
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+
+import random
+from unittest.mock import ANY
+from unittest.mock import MagicMock
+
+import pytest
+
+try:
+ import botocore
+except ImportError:
+ # Handled by HAS_BOTO3
+ pass
+
+
+from ansible_collections.amazon.aws.plugins.module_utils.acm import ACMServiceManager
+from ansible_collections.amazon.aws.plugins.module_utils.acm import acm_catch_boto_exception
+
+MODULE_NAME = "ansible_collections.amazon.aws.plugins.module_utils.acm"
+
+
+@pytest.fixture()
+def acm_service_mgr():
+ module = MagicMock()
+ module.fail_json_aws.side_effect = SystemExit(2)
+ module.fail_json.side_effect = SystemExit(1)
+ module.client.return_value = MagicMock()
+
+ acm_service_mgr_obj = ACMServiceManager(module)
+
+ return acm_service_mgr_obj
+
+
+def raise_botocore_error(code="AccessDenied"):
+ return botocore.exceptions.ClientError({"Error": {"Code": code}}, "Certificate")
+
+
+@pytest.mark.parametrize("has_module_arg", [True, False])
+def test_acm_catch_boto_exception_failure(has_module_arg):
+ module = MagicMock()
+ module.fail_json_aws.side_effect = SystemExit(2)
+
+ boto_err = raise_botocore_error()
+
+ @acm_catch_boto_exception
+ def generate_boto_exception():
+ raise boto_err
+
+ if has_module_arg:
+ with pytest.raises(SystemExit):
+ generate_boto_exception(module=module, error="test")
+ module.fail_json_aws.assert_called_with(boto_err, msg="test")
+ else:
+ with pytest.raises(botocore.exceptions.ClientError):
+ generate_boto_exception(error="test")
+ module.fail_json_aws.assert_not_called()
+
+
+def test_acm_catch_boto_exception_with_ignore_code():
+ codes = ["this_exception_code_is_ignored", "this_another_exception_code_is_ignored"]
+
+ @acm_catch_boto_exception
+ def raise_exception_with_ignore_error_code(**kwargs):
+ raise raise_botocore_error(code=random.choice(codes))
+
+ assert raise_exception_with_ignore_error_code(ignore_error_codes=codes) is None
+
+
+def test_acm_catch_boto_exception():
+ data = {i: MagicMock() for i in range(10)}
+
+ @acm_catch_boto_exception
+ def get_data(*args, **kwargs):
+ if len(args) > 0:
+ return data.get(args[0])
+ return data.get(kwargs.get("id"))
+
+ for i in range(10):
+ assert data.get(i) == get_data(i)
+ assert data.get(i) == get_data(id=i)
+
+
+def test_acm_service_manager_init():
+ module = MagicMock()
+ module.client.return_value = {"client": "unit_tests"}
+
+ ACMServiceManager(module)
+ module.client.assert_called_once_with("acm")
+
+
+def test_acm_service_manager_get_domain_of_cert(acm_service_mgr):
+ arn = "arn:aws:acm:us-west-01:123456789012:certificate/12345678-1234-1234-1234-123456789012"
+
+ certificate = {"Certificate": {"DomainName": MagicMock()}, "ResponseMetaData": {"code": 200}}
+ acm_service_mgr.client.describe_certificate.return_value = certificate
+ assert acm_service_mgr.get_domain_of_cert(arn=arn) == certificate["Certificate"]["DomainName"]
+
+
+def test_acm_service_manager_get_domain_of_cert_missing_arn(acm_service_mgr):
+ with pytest.raises(SystemExit):
+ acm_service_mgr.get_domain_of_cert(arn=None)
+ error = "Internal error with ACM domain fetching, no certificate ARN specified"
+ acm_service_mgr.module.fail_json.assert_called_with(msg=error)
+ acm_service_mgr.module.fail_json_aws.assert_not_called()
+
+
+def test_acm_service_manager_get_domain_of_cert_failure(acm_service_mgr):
+ arn = "arn:aws:acm:us-west-01:123456789012:certificate/12345678-1234-1234-1234-123456789012"
+ boto_err = raise_botocore_error()
+
+ acm_service_mgr.client.describe_certificate.side_effect = boto_err
+ with pytest.raises(SystemExit):
+ acm_service_mgr.get_domain_of_cert(arn=arn)
+
+ error = f"Couldn't obtain certificate data for arn {arn}"
+ acm_service_mgr.module.fail_json_aws.assert_called_with(boto_err, msg=error)
+    acm_service_mgr.module.fail_json.assert_not_called()
+
+
+def test_acm_service_manager_get_domain_of_cert_with_retry_and_success(acm_service_mgr):
+ arn = "arn:aws:acm:us-west-01:123456789012:certificate/12345678-1234-1234-1234-123456789012"
+ boto_err = raise_botocore_error(code="ResourceNotFoundException")
+ certificate = {"Certificate": {"DomainName": MagicMock()}, "ResponseMetaData": {"code": 200}}
+ acm_service_mgr.client.describe_certificate.side_effect = [boto_err, certificate]
+ assert acm_service_mgr.get_domain_of_cert(arn=arn) == certificate["Certificate"]["DomainName"]
+
+
+def test_acm_service_manager_get_domain_of_cert_with_retry_and_failure(acm_service_mgr):
+ arn = "arn:aws:acm:us-west-01:123456789012:certificate/12345678-1234-1234-1234-123456789012"
+ boto_err = raise_botocore_error(code="ResourceNotFoundException")
+
+ acm_service_mgr.client.describe_certificate.side_effect = [boto_err for i in range(10)]
+ with pytest.raises(SystemExit):
+ acm_service_mgr.get_domain_of_cert(arn=arn)
+
+
+def test_acm_service_manager_import_certificate_failure_at_import(acm_service_mgr):
+ acm_service_mgr.client.import_certificate.side_effect = raise_botocore_error()
+ with pytest.raises(SystemExit):
+ acm_service_mgr.import_certificate(certificate=MagicMock(), private_key=MagicMock())
+
+
+def test_acm_service_manager_import_certificate_failure_at_tagging(acm_service_mgr):
+ arn = "arn:aws:acm:us-west-01:123456789012:certificate/12345678-1234-1234-1234-123456789012"
+ acm_service_mgr.client.import_certificate.return_value = {"CertificateArn": arn}
+
+ boto_err = raise_botocore_error()
+ acm_service_mgr.client.add_tags_to_certificate.side_effect = boto_err
+
+ with pytest.raises(SystemExit):
+ acm_service_mgr.import_certificate(certificate=MagicMock(), private_key=MagicMock())
+ acm_service_mgr.module.fail_json_aws.assert_called_with(boto_err, msg=f"Couldn't tag certificate {arn}")
+
+
+def test_acm_service_manager_import_certificate_failure_at_deletion(acm_service_mgr):
+ arn = "arn:aws:acm:us-west-01:123456789012:certificate/12345678-1234-1234-1234-123456789012"
+ acm_service_mgr.client.import_certificate.return_value = {"CertificateArn": arn}
+
+ acm_service_mgr.client.add_tags_to_certificate.side_effect = raise_botocore_error()
+ delete_err = raise_botocore_error(code="DeletionError")
+ acm_service_mgr.client.delete_certificate.side_effect = delete_err
+
+ with pytest.raises(SystemExit):
+ acm_service_mgr.import_certificate(certificate=MagicMock(), private_key=MagicMock())
+ acm_service_mgr.module.warn.assert_called_with(
+ f"Certificate {arn} exists, and is not tagged. So Ansible will not see it on the next run."
+ )
+
+
+def test_acm_service_manager_import_certificate_failure_with_arn_change(acm_service_mgr):
+ original_arn = "original_arn:aws:acm:us-west-01:123456789012:certificate/12345678-1234-1234-1234-123456789012"
+ arn = "arn:aws:acm:us-west-01:123456789012:certificate/12345678-1234-1234-1234-123456789012"
+
+ acm_service_mgr.import_certificate_with_backoff = MagicMock()
+ acm_service_mgr.import_certificate_with_backoff.return_value = arn
+
+ with pytest.raises(SystemExit):
+ acm_service_mgr.import_certificate(certificate=MagicMock(), private_key=MagicMock(), arn=original_arn)
+ acm_service_mgr.module.fail_json.assert_called_with(
+ msg=f"ARN changed with ACM update, from {original_arn} to {arn}"
+ )
+
+
+def test_acm_service_manager_import_certificate(acm_service_mgr):
+ arn = "arn:aws:acm:us-west-01:123456789012:certificate/12345678-1234-1234-1234-123456789012"
+
+ acm_service_mgr.import_certificate_with_backoff = MagicMock()
+ acm_service_mgr.import_certificate_with_backoff.return_value = arn
+
+ acm_service_mgr.tag_certificate_with_backoff = MagicMock()
+
+ assert arn == acm_service_mgr.import_certificate(certificate=MagicMock(), private_key=MagicMock(), arn=arn)
+
+
+def test_acm_service_manager_delete_certificate_keyword_arn(acm_service_mgr):
+ arn = "arn:aws:acm:us-west-01:123456789012:certificate/12345678-1234-1234-1234-123456789012"
+ acm_service_mgr.delete_certificate_with_backoff = MagicMock()
+ acm_service_mgr.delete_certificate(arn=arn)
+ err = f"Couldn't delete certificate {arn}"
+ acm_service_mgr.delete_certificate_with_backoff.assert_called_with(arn, module=acm_service_mgr.module, error=err)
+
+
+def test_acm_service_manager_delete_certificate_positional_arn(acm_service_mgr):
+ arn = "arn:aws:acm:us-west-01:123456789012:certificate/12345678-1234-1234-1234-123456789012"
+ acm_service_mgr.delete_certificate_with_backoff = MagicMock()
+ module = MagicMock()
+ client = MagicMock()
+ acm_service_mgr.delete_certificate(module, client, arn)
+ err = f"Couldn't delete certificate {arn}"
+ acm_service_mgr.delete_certificate_with_backoff.assert_called_with(arn, module=acm_service_mgr.module, error=err)
+
+
+def test_acm_service_manager_delete_certificate_missing_arn(acm_service_mgr):
+ with pytest.raises(SystemExit):
+ acm_service_mgr.delete_certificate()
+ acm_service_mgr.module.fail_json.assert_called_with(msg="Missing required certificate arn to delete.")
+
+
+def test_acm_service_manager_delete_certificate_failure(acm_service_mgr):
+ arn = "arn:aws:acm:us-west-01:123456789012:certificate/12345678-1234-1234-1234-123456789012"
+ acm_service_mgr.client.delete_certificate.side_effect = raise_botocore_error()
+ with pytest.raises(SystemExit):
+ acm_service_mgr.delete_certificate(arn=arn)
+
+
+@pytest.mark.parametrize(
+ "ref,cert,result",
+ [
+ (None, ANY, True),
+ ({"phase": "test"}, {"Phase": "test"}, False),
+ ({"phase": "test"}, {"phase": "test"}, True),
+ ({"phase": "test"}, {"phase": "test", "collection": "amazon.aws"}, True),
+ ({"phase": "test", "collection": "amazon"}, {"phase": "test", "collection": "amazon.aws"}, False),
+ ({"phase": "test", "collection": "amazon"}, {"phase": "test"}, False),
+ ],
+)
+def test_acm_service_manager_match_tags(acm_service_mgr, ref, cert, result):
+ assert acm_service_mgr._match_tags(ref, cert) == result
+
+
+def test_acm_service_manager_match_tags_failure(acm_service_mgr):
+ with pytest.raises(SystemExit):
+ acm_service_mgr._match_tags({"Tag": "tag1"}, 10)
+ acm_service_mgr.module.fail_json_aws.assert_called_once()
+
+
+def test_acm_service_manager_get_certificates_no_certificates(acm_service_mgr):
+ acm_service_mgr.list_certificates_with_backoff = MagicMock()
+ acm_service_mgr.list_certificates_with_backoff.return_value = []
+
+ assert acm_service_mgr.get_certificates(domain_name=MagicMock(), statuses=MagicMock(), arn=ANY, only_tags=ANY) == []
+
+
+@pytest.mark.parametrize(
+ "domain_name,arn,tags,expected",
+ [
+ (None, None, None, [0, 1, 3]),
+ ("ansible.com", None, None, [0]),
+ ("ansible.com", "arn:aws:1", None, [0]),
+ (None, "arn:aws:1", None, [0]),
+ (None, "arn:aws:4", None, [3]),
+ ("ansible.com", "arn:aws:3", None, []),
+ ("ansible.org", None, None, [1, 3]),
+ ("ansible.org", "arn:aws:2", None, [1]),
+ ("ansible.org", "arn:aws:4", None, [3]),
+ (None, None, {"CertificateArn": "arn:aws:2"}, [1]),
+ (None, None, {"CertificateType": "x509"}, [0, 1]),
+ (None, None, {"CertificateType": "x509", "CertificateArn": "arn:aws:2"}, [1]),
+ ],
+)
+def test_acm_service_manager_get_certificates(acm_service_mgr, domain_name, arn, tags, expected):
+ all_certificates = [
+ {"CertificateArn": "arn:aws:1", "DomainName": "ansible.com"},
+ {"CertificateArn": "arn:aws:2", "DomainName": "ansible.org"},
+ {"CertificateArn": "arn:aws:3", "DomainName": "ansible.com"},
+ {"CertificateArn": "arn:aws:4", "DomainName": "ansible.org"},
+ ]
+
+ acm_service_mgr.list_certificates_with_backoff = MagicMock()
+ acm_service_mgr.list_certificates_with_backoff.return_value = all_certificates
+
+ describe_certificates = {
+ "arn:aws:1": {"Status": "VALIDATED", "CertificateArn": "arn:aws:1", "AnotherKey": "some_key_value"},
+ "arn:aws:2": {"Status": "VALIDATION_TIMED_OUT", "CertificateArn": "arn:aws:2"},
+ "arn:aws:3": {"Status": "FAILED", "CertificateArn": "arn:aws:3", "CertificateValidity": "11222022"},
+ "arn:aws:4": {"Status": "PENDING_VALIDATION", "CertificateArn": "arn:aws:4"},
+ }
+
+ get_certificates = {
+ "arn:aws:1": {"Provider": "Dummy", "Private": True},
+ "arn:aws:2": None,
+ "arn:aws:3": {},
+ "arn:aws:4": {},
+ }
+
+ certificate_tags = {
+ "arn:aws:1": [
+ {"Key": "Validated", "Value": True},
+ {"Key": "CertificateType", "Value": "x509"},
+ {"Key": "CertificateArn", "Value": "arn:aws:1"},
+ ],
+ "arn:aws:2": [{"Key": "CertificateType", "Value": "x509"}, {"Key": "CertificateArn", "Value": "arn:aws:2"}],
+ "arn:aws:3": None,
+ "arn:aws:4": {},
+ }
+
+ all_results = [
+ {
+ "status": "VALIDATED",
+ "certificate_arn": "arn:aws:1",
+ "another_key": "some_key_value",
+ "provider": "Dummy",
+ "private": True,
+ "tags": {"Validated": True, "CertificateType": "x509", "CertificateArn": "arn:aws:1"},
+ },
+ {
+ "status": "VALIDATION_TIMED_OUT",
+ "certificate_arn": "arn:aws:2",
+ "tags": {"CertificateType": "x509", "CertificateArn": "arn:aws:2"},
+ },
+ {"status": "FAILED", "certificate_arn": "arn:aws:3", "certificate_validity": "11222022"},
+ {"status": "PENDING_VALIDATION", "certificate_arn": "arn:aws:4", "tags": {}},
+ ]
+
+ results = [all_results[i] for i in range(len(all_results)) if i in expected]
+
+ acm_service_mgr.describe_certificate_with_backoff = MagicMock()
+ acm_service_mgr.describe_certificate_with_backoff.side_effect = lambda *args, **kwargs: describe_certificates.get(
+ args[0]
+ )
+
+ acm_service_mgr.get_certificate_with_backoff = MagicMock()
+ acm_service_mgr.get_certificate_with_backoff.side_effect = lambda *args, **kwargs: get_certificates.get(args[0])
+
+ acm_service_mgr.list_certificate_tags_with_backoff = MagicMock()
+ acm_service_mgr.list_certificate_tags_with_backoff.side_effect = lambda *args, **kwargs: certificate_tags.get(
+ args[0], []
+ )
+
+ assert (
+ acm_service_mgr.get_certificates(domain_name=domain_name, statuses=MagicMock(), arn=arn, only_tags=tags)
+ == results
+ )
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/test_cloudfront_facts.py b/ansible_collections/amazon/aws/tests/unit/module_utils/test_cloudfront_facts.py
new file mode 100644
index 000000000..774d6bb10
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/test_cloudfront_facts.py
@@ -0,0 +1,487 @@
+#
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+
+import pytest
+
+try:
+ import botocore
+except ImportError:
+ # Handled by HAS_BOTO3
+ pass
+
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import patch
+
+from ansible_collections.amazon.aws.plugins.module_utils.cloudfront_facts import CloudFrontFactsServiceManager
+from ansible_collections.amazon.aws.plugins.module_utils.cloudfront_facts import CloudFrontFactsServiceManagerFailure
+from ansible_collections.amazon.aws.plugins.module_utils.cloudfront_facts import cloudfront_facts_keyed_list_helper
+
+MODULE_NAME = "ansible_collections.amazon.aws.plugins.module_utils.cloudfront_facts"
+MOCK_CLOUDFRONT_FACTS_KEYED_LIST_HELPER = MODULE_NAME + ".cloudfront_facts_keyed_list_helper"
+
+
+@pytest.fixture()
+def cloudfront_facts_service():
+ module = MagicMock()
+ cloudfront_facts = CloudFrontFactsServiceManager(module)
+
+ cloudfront_facts.module = MagicMock()
+ cloudfront_facts.module.fail_json_aws.side_effect = SystemExit(1)
+
+ cloudfront_facts.client = MagicMock()
+
+ return cloudfront_facts
+
+
+def raise_botocore_error(operation="getCloudFront"):
+ return botocore.exceptions.ClientError(
+ {
+ "Error": {"Code": "AccessDenied", "Message": "User: Unauthorized operation"},
+ "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
+ },
+ operation,
+ )
+
+
+def test_unsupported_api(cloudfront_facts_service):
+ with pytest.raises(CloudFrontFactsServiceManagerFailure) as err:
+ cloudfront_facts_service._unsupported_api()
+    assert "Method _unsupported_api is not currently supported" in str(err.value)
+
+
+def test_get_distribution(cloudfront_facts_service):
+ cloudfront_facts = MagicMock()
+ cloudfront_id = MagicMock()
+ cloudfront_facts_service.client.get_distribution.return_value = cloudfront_facts
+
+ assert cloudfront_facts == cloudfront_facts_service.get_distribution(id=cloudfront_id)
+ cloudfront_facts_service.client.get_distribution.assert_called_with(Id=cloudfront_id, aws_retry=True)
+
+
+def test_get_distribution_failure(cloudfront_facts_service):
+ cloudfront_id = MagicMock()
+ cloudfront_facts_service.client.get_distribution.side_effect = raise_botocore_error()
+
+ with pytest.raises(SystemExit):
+ cloudfront_facts_service.get_distribution(id=cloudfront_id)
+ cloudfront_facts_service.client.get_distribution.assert_called_with(Id=cloudfront_id, aws_retry=True)
+
+
+def test_get_distribution_fail_if_error(cloudfront_facts_service):
+ cloudfront_id = MagicMock()
+ cloudfront_facts_service.client.get_distribution.side_effect = raise_botocore_error()
+
+ with pytest.raises(botocore.exceptions.ClientError):
+ cloudfront_facts_service.get_distribution(id=cloudfront_id, fail_if_error=False)
+ cloudfront_facts_service.client.get_distribution.assert_called_with(Id=cloudfront_id, aws_retry=True)
+
+
+def test_get_invalidation(cloudfront_facts_service):
+ cloudfront_facts = MagicMock()
+ cloudfront_id = MagicMock()
+ distribution_id = MagicMock()
+ cloudfront_facts_service.client.get_invalidation.return_value = cloudfront_facts
+
+ assert cloudfront_facts == cloudfront_facts_service.get_invalidation(
+ distribution_id=distribution_id, id=cloudfront_id
+ )
+ cloudfront_facts_service.client.get_invalidation.assert_called_with(
+ DistributionId=distribution_id, Id=cloudfront_id, aws_retry=True
+ )
+
+
+def test_get_invalidation_failure(cloudfront_facts_service):
+ cloudfront_id = MagicMock()
+ distribution_id = MagicMock()
+ cloudfront_facts_service.client.get_invalidation.side_effect = raise_botocore_error()
+
+ with pytest.raises(SystemExit):
+ cloudfront_facts_service.get_invalidation(distribution_id=distribution_id, id=cloudfront_id)
+
+
+@patch(MOCK_CLOUDFRONT_FACTS_KEYED_LIST_HELPER)
+def test_list_distributions_by_web_acl_id(m_cloudfront_facts_keyed_list_helper, cloudfront_facts_service):
+ web_acl_id = MagicMock()
+ distribution_webacl = {"DistributionList": {"Items": [f"webacl_{int(d)}" for d in range(10)]}}
+ cloudfront_facts_service.client.list_distributions_by_web_acl_id.return_value = distribution_webacl
+ m_cloudfront_facts_keyed_list_helper.return_value = distribution_webacl["DistributionList"]["Items"]
+
+ result = cloudfront_facts_service.list_distributions_by_web_acl_id(web_acl_id=web_acl_id)
+ assert distribution_webacl["DistributionList"]["Items"] == result
+ cloudfront_facts_service.client.list_distributions_by_web_acl_id.assert_called_with(
+ WebAclId=web_acl_id, aws_retry=True
+ )
+ m_cloudfront_facts_keyed_list_helper.assert_called_with(distribution_webacl["DistributionList"]["Items"])
+
+
+@patch(MOCK_CLOUDFRONT_FACTS_KEYED_LIST_HELPER)
+@patch(MODULE_NAME + "._cloudfront_paginate_build_full_result")
+def test_list_origin_access_identities(
+ m_cloudfront_paginate_build_full_result, m_cloudfront_facts_keyed_list_helper, cloudfront_facts_service
+):
+ items = [f"item_{int(d)}" for d in range(10)]
+ result = {"CloudFrontOriginAccessIdentityList": {"Items": items}}
+
+ m_cloudfront_paginate_build_full_result.return_value = result
+ assert items == cloudfront_facts_service.list_origin_access_identities()
+ m_cloudfront_facts_keyed_list_helper.assert_not_called()
+
+
+@patch(MOCK_CLOUDFRONT_FACTS_KEYED_LIST_HELPER)
+@patch(MODULE_NAME + "._cloudfront_paginate_build_full_result")
+def test_list_distributions(
+ m_cloudfront_paginate_build_full_result, m_cloudfront_facts_keyed_list_helper, cloudfront_facts_service
+):
+ items = [f"item_{int(d)}" for d in range(10)]
+ result = {"DistributionList": {"Items": items}}
+
+ m_cloudfront_paginate_build_full_result.return_value = result
+ m_cloudfront_facts_keyed_list_helper.return_value = items
+
+ assert items == cloudfront_facts_service.list_distributions()
+ m_cloudfront_facts_keyed_list_helper.assert_called_with(items)
+
+
+@patch(MOCK_CLOUDFRONT_FACTS_KEYED_LIST_HELPER)
+@patch(MODULE_NAME + "._cloudfront_paginate_build_full_result")
+def test_list_invalidations(
+ m_cloudfront_paginate_build_full_result, m_cloudfront_facts_keyed_list_helper, cloudfront_facts_service
+):
+ items = [f"item_{int(d)}" for d in range(10)]
+ result = {"InvalidationList": {"Items": items}}
+ distribution_id = MagicMock()
+
+ m_cloudfront_paginate_build_full_result.return_value = result
+ m_cloudfront_facts_keyed_list_helper.return_value = items
+
+ assert items == cloudfront_facts_service.list_invalidations(distribution_id=distribution_id)
+ m_cloudfront_facts_keyed_list_helper.assert_not_called()
+ m_cloudfront_paginate_build_full_result.assert_called_with(
+ cloudfront_facts_service.client, "list_invalidations", DistributionId=distribution_id
+ )
+
+
+@pytest.mark.parametrize("fail_if_error", [True, False])
+@patch(MODULE_NAME + "._cloudfront_paginate_build_full_result")
+def test_list_invalidations_failure(m_cloudfront_paginate_build_full_result, cloudfront_facts_service, fail_if_error):
+ distribution_id = MagicMock()
+ m_cloudfront_paginate_build_full_result.side_effect = raise_botocore_error()
+
+ if fail_if_error:
+ with pytest.raises(SystemExit):
+ cloudfront_facts_service.list_invalidations(distribution_id=distribution_id, fail_if_error=fail_if_error)
+ else:
+ with pytest.raises(botocore.exceptions.ClientError):
+ cloudfront_facts_service.list_invalidations(distribution_id=distribution_id, fail_if_error=fail_if_error)
+ m_cloudfront_paginate_build_full_result.assert_called_with(
+ cloudfront_facts_service.client, "list_invalidations", DistributionId=distribution_id
+ )
+
+
+@pytest.mark.parametrize(
+ "list_to_key,expected",
+ [
+ ([], {}),
+ (
+ [{"Id": "id_1", "Aliases": {}}, {"Id": "id_2", "Aliases": {"Items": ["alias_1", "alias_2"]}}],
+ {
+ "id_1": {"Id": "id_1", "Aliases": {}},
+ "id_2": {"Id": "id_2", "Aliases": {"Items": ["alias_1", "alias_2"]}},
+ "alias_1": {"Id": "id_2", "Aliases": {"Items": ["alias_1", "alias_2"]}},
+ "alias_2": {"Id": "id_2", "Aliases": {"Items": ["alias_1", "alias_2"]}},
+ },
+ ),
+ ],
+)
+def test_cloudfront_facts_keyed_list_helper(list_to_key, expected):
+ assert expected == cloudfront_facts_keyed_list_helper(list_to_key)
+
+
+@pytest.mark.parametrize(
+ "distribution,expected",
+ [
+ ({"Distribution": {"DistributionConfig": {"Aliases": {"Items": ["item_1", "item_2"]}}}}, ["item_1", "item_2"]),
+ ({"Distribution": {"DistributionConfig": {"Aliases": {}}}}, []),
+ ],
+)
+def test_get_aliases_from_distribution_id(cloudfront_facts_service, distribution, expected):
+ distribution_id = MagicMock()
+
+ cloudfront_facts_service.get_distribution = MagicMock()
+ cloudfront_facts_service.get_distribution.return_value = distribution
+ assert expected == cloudfront_facts_service.get_aliases_from_distribution_id(distribution_id)
+
+
+def test_get_aliases_from_distribution_id_failure(cloudfront_facts_service):
+ distribution_id = MagicMock()
+
+ cloudfront_facts_service.get_distribution = MagicMock()
+ cloudfront_facts_service.get_distribution.side_effect = raise_botocore_error()
+
+ with pytest.raises(SystemExit):
+ cloudfront_facts_service.get_aliases_from_distribution_id(distribution_id)
+ cloudfront_facts_service.get_distribution.assert_called_once_with(id=distribution_id)
+
+
+@pytest.mark.parametrize(
+ "distributions,streaming_distributions,domain_name,expected",
+ [
+ ([], [], MagicMock(), ""),
+ ([{"Aliases": {"Items": ["domain_01", "domain_02"]}, "Id": "id-01"}], [], "domain01", ""),
+ ([{"Aliases": {"Items": ["domain_01", "domain_02"]}, "Id": "id-01"}], [], "domain_01", "id-01"),
+ ([{"Aliases": {"Items": ["domain_01", "domain_02"]}, "Id": "id-01"}], [], "DOMAIN_01", "id-01"),
+ ([{"Aliases": {"Items": ["domain_01", "domain_02"]}, "Id": "id-01"}], [], "domain_02", "id-01"),
+ ([], [{"Aliases": {"Items": ["domain_01", "domain_02"]}, "Id": "stream-01"}], "DOMAIN", ""),
+ ([], [{"Aliases": {"Items": ["domain_01", "domain_02"]}, "Id": "stream-01"}], "DOMAIN_01", "stream-01"),
+ ([], [{"Aliases": {"Items": ["domain_01", "domain_02"]}, "Id": "stream-01"}], "domain_01", "stream-01"),
+ ([], [{"Aliases": {"Items": ["domain_01", "domain_02"]}, "Id": "stream-01"}], "domain_02", "stream-01"),
+ (
+ [{"Aliases": {"Items": ["domain_01", "domain_02"]}, "Id": "id-01"}],
+ [{"Aliases": {"Items": ["domain_01", "domain_02"]}, "Id": "stream-01"}],
+ "domain_01",
+ "stream-01",
+ ),
+ ],
+)
+def test_get_distribution_id_from_domain_name(
+ cloudfront_facts_service, distributions, streaming_distributions, domain_name, expected
+):
+ cloudfront_facts_service.list_distributions = MagicMock()
+ cloudfront_facts_service.list_streaming_distributions = MagicMock()
+
+ cloudfront_facts_service.list_distributions.return_value = distributions
+ cloudfront_facts_service.list_streaming_distributions.return_value = streaming_distributions
+
+ assert expected == cloudfront_facts_service.get_distribution_id_from_domain_name(domain_name)
+
+ cloudfront_facts_service.list_distributions.assert_called_once_with(keyed=False)
+ cloudfront_facts_service.list_streaming_distributions.assert_called_once_with(keyed=False)
+
+
+@pytest.mark.parametrize("streaming", [True, False])
+def test_get_etag_from_distribution_id(cloudfront_facts_service, streaming):
+ distribution = {"ETag": MagicMock()}
+ streaming_distribution = {"ETag": MagicMock()}
+
+ distribution_id = MagicMock()
+
+ cloudfront_facts_service.get_distribution = MagicMock()
+ cloudfront_facts_service.get_distribution.return_value = distribution
+
+ cloudfront_facts_service.get_streaming_distribution = MagicMock()
+ cloudfront_facts_service.get_streaming_distribution.return_value = streaming_distribution
+
+ expected = distribution if not streaming else streaming_distribution
+
+ assert expected["ETag"] == cloudfront_facts_service.get_etag_from_distribution_id(distribution_id, streaming)
+ if not streaming:
+ cloudfront_facts_service.get_distribution.assert_called_once_with(id=distribution_id)
+ else:
+ cloudfront_facts_service.get_streaming_distribution.assert_called_once_with(id=distribution_id)
+
+
+@pytest.mark.parametrize(
+ "invalidations, expected",
+ [
+ ([], []),
+ ([{"Id": "id-01"}], ["id-01"]),
+ ([{"Id": "id-01"}, {"Id": "id-02"}], ["id-01", "id-02"]),
+ ],
+)
+def test_get_list_of_invalidation_ids_from_distribution_id(cloudfront_facts_service, invalidations, expected):
+ cloudfront_facts_service.list_invalidations = MagicMock()
+ cloudfront_facts_service.list_invalidations.return_value = invalidations
+
+ distribution_id = MagicMock()
+ assert expected == cloudfront_facts_service.get_list_of_invalidation_ids_from_distribution_id(distribution_id)
+ cloudfront_facts_service.list_invalidations.assert_called_with(distribution_id=distribution_id)
+
+
+def test_get_list_of_invalidation_ids_from_distribution_id_failure(cloudfront_facts_service):
+ cloudfront_facts_service.list_invalidations = MagicMock()
+ cloudfront_facts_service.list_invalidations.side_effect = raise_botocore_error()
+
+ distribution_id = MagicMock()
+ with pytest.raises(SystemExit):
+ cloudfront_facts_service.get_list_of_invalidation_ids_from_distribution_id(distribution_id)
+
+
+@pytest.mark.parametrize("streaming", [True, False])
+@pytest.mark.parametrize(
+ "distributions, expected",
+ [
+ ([], []),
+ (
+ [
+ {
+ "Id": "id_1",
+ "Aliases": {"Items": ["item_1", "item_2"]},
+ "WebACLId": "webacl_1",
+ "ARN": "arn:ditribution:us-east-1:1",
+ "Status": "available",
+ "LastModifiedTime": "11102022120000",
+ "DomainName": "domain_01.com",
+ "Comment": "This is the first distribution",
+ "PriceClass": "low",
+ "Enabled": "False",
+ "Tags": {"Items": [{"Name": "tag1", "Value": "distribution1"}]},
+ "ETag": "abcdefgh",
+ "_ids": [],
+ },
+ {
+ "Id": "id_2",
+ "Aliases": {"Items": ["item_20"]},
+ "WebACLId": "webacl_2",
+ "ARN": "arn:ditribution:us-west:2",
+ "Status": "active",
+ "LastModifiedTime": "11102022200000",
+ "DomainName": "another_domain_name.com",
+ "Comment": "This is the second distribution",
+ "PriceClass": "High",
+ "Enabled": "True",
+ "Tags": {
+ "Items": [
+ {"Name": "tag2", "Value": "distribution2"},
+ {"Name": "another_tag", "Value": "item 2"},
+ ]
+ },
+ "ETag": "ABCDEFGH",
+ "_ids": ["invalidation_1", "invalidation_2"],
+ },
+ ],
+ [
+ {
+ "Id": "id_1",
+ "ARN": "arn:ditribution:us-east-1:1",
+ "Status": "available",
+ "LastModifiedTime": "11102022120000",
+ "DomainName": "domain_01.com",
+ "Comment": "This is the first distribution",
+ "PriceClass": "low",
+ "Enabled": "False",
+ "Aliases": ["item_1", "item_2"],
+ "ETag": "abcdefgh",
+ "WebACLId": "webacl_1",
+ "Tags": [{"Name": "tag1", "Value": "distribution1"}],
+ },
+ {
+ "Id": "id_2",
+ "ARN": "arn:ditribution:us-west:2",
+ "Status": "active",
+ "LastModifiedTime": "11102022200000",
+ "DomainName": "another_domain_name.com",
+ "Comment": "This is the second distribution",
+ "PriceClass": "High",
+ "Enabled": "True",
+ "Aliases": ["item_20"],
+ "ETag": "ABCDEFGH",
+ "WebACLId": "webacl_2",
+ "Invalidations": ["invalidation_1", "invalidation_2"],
+ "Tags": [{"Name": "tag2", "Value": "distribution2"}, {"Name": "another_tag", "Value": "item 2"}],
+ },
+ ],
+ ),
+ ],
+)
+@patch(MODULE_NAME + ".boto3_tag_list_to_ansible_dict")
+def test_summary_get_distribution_list(
+ m_boto3_tag_list_to_ansible_dict, cloudfront_facts_service, streaming, distributions, expected
+):
+ m_boto3_tag_list_to_ansible_dict.side_effect = lambda x: x
+
+ cloudfront_facts_service.list_streaming_distributions = MagicMock()
+ cloudfront_facts_service.list_streaming_distributions.return_value = distributions
+
+ cloudfront_facts_service.list_distributions = MagicMock()
+ cloudfront_facts_service.list_distributions.return_value = distributions
+
+ cloudfront_facts_service.get_etag_from_distribution_id = MagicMock()
+ cloudfront_facts_service.get_etag_from_distribution_id.side_effect = lambda id, stream: [
+ x["ETag"] for x in distributions if x["Id"] == id
+ ][0]
+
+ cloudfront_facts_service.get_list_of_invalidation_ids_from_distribution_id = MagicMock()
+ cloudfront_facts_service.get_list_of_invalidation_ids_from_distribution_id.side_effect = lambda id: [
+ x["_ids"] for x in distributions if x["Id"] == id
+ ][0]
+
+ cloudfront_facts_service.list_resource_tags = MagicMock()
+ cloudfront_facts_service.list_resource_tags.side_effect = lambda arn: {
+ "Tags": x["Tags"] for x in distributions if x["ARN"] == arn
+ }
+
+ key_name = "streaming_distributions"
+ if not streaming:
+ key_name = "distributions"
+
+ if streaming:
+ expected = list(map(lambda x: {k: x[k] for k in x if k not in ("WebACLId", "Invalidations")}, expected))
+ assert {key_name: expected} == cloudfront_facts_service.summary_get_distribution_list(streaming)
+
+
+@pytest.mark.parametrize("streaming", [True, False])
+def test_summary_get_distribution_list_failure(cloudfront_facts_service, streaming):
+ cloudfront_facts_service.list_streaming_distributions = MagicMock()
+ cloudfront_facts_service.list_streaming_distributions.side_effect = raise_botocore_error()
+
+ cloudfront_facts_service.list_distributions = MagicMock()
+ cloudfront_facts_service.list_distributions.side_effect = raise_botocore_error()
+
+ with pytest.raises(SystemExit):
+ cloudfront_facts_service.summary_get_distribution_list(streaming)
+
+
+def test_summary(cloudfront_facts_service):
+ cloudfront_facts_service.summary_get_distribution_list = MagicMock()
+ cloudfront_facts_service.summary_get_distribution_list.side_effect = lambda x: (
+ {"called_with_true": True} if x else {"called_with_false": False}
+ )
+
+ cloudfront_facts_service.summary_get_origin_access_identity_list = MagicMock()
+ cloudfront_facts_service.summary_get_origin_access_identity_list.return_value = {
+ "origin_access_ids": ["access_1", "access_2"]
+ }
+
+ expected = {"called_with_true": True, "called_with_false": False, "origin_access_ids": ["access_1", "access_2"]}
+
+ assert expected == cloudfront_facts_service.summary()
+
+ cloudfront_facts_service.summary_get_origin_access_identity_list.assert_called_once()
+ cloudfront_facts_service.summary_get_distribution_list.assert_has_calls([call(True), call(False)], any_order=True)
+
+
+@pytest.mark.parametrize(
+ "origin_access_identities,expected",
+ [
+ ([], []),
+ (
+ [
+ {"Id": "some_id", "response": {"state": "active", "ETag": "some_Etag"}},
+ {"Id": "another_id", "response": {"ETag": "another_Etag"}},
+ ],
+ [{"Id": "some_id", "ETag": "some_Etag"}, {"Id": "another_id", "ETag": "another_Etag"}],
+ ),
+ ],
+)
+def test_summary_get_origin_access_identity_list(cloudfront_facts_service, origin_access_identities, expected):
+ cloudfront_facts_service.list_origin_access_identities = MagicMock()
+ cloudfront_facts_service.list_origin_access_identities.return_value = origin_access_identities
+ cloudfront_facts_service.get_origin_access_identity = MagicMock()
+ cloudfront_facts_service.get_origin_access_identity.side_effect = lambda x: [
+ o["response"] for o in origin_access_identities if o["Id"] == x
+ ][0]
+
+ assert {"origin_access_identities": expected} == cloudfront_facts_service.summary_get_origin_access_identity_list()
+
+
+def test_summary_get_origin_access_identity_list_failure(cloudfront_facts_service):
+ cloudfront_facts_service.list_origin_access_identities = MagicMock()
+ cloudfront_facts_service.list_origin_access_identities.side_effect = raise_botocore_error()
+
+ with pytest.raises(SystemExit):
+ cloudfront_facts_service.summary_get_origin_access_identity_list()
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/test_elbv2.py b/ansible_collections/amazon/aws/tests/unit/module_utils/test_elbv2.py
index 48c32c78e..d7293f0ce 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/test_elbv2.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/test_elbv2.py
@@ -4,11 +4,9 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from unittest.mock import MagicMock
from ansible_collections.amazon.aws.plugins.module_utils import elbv2
-from ansible_collections.amazon.aws.tests.unit.compat.mock import MagicMock
one_action = [
{
@@ -21,7 +19,9 @@ one_action = [
}
],
},
- "TargetGroupArn": "arn:aws:elasticloadbalancing:us-east-1:123456789012:targetgroup/my-tg-58045486/5b231e04f663ae21",
+ "TargetGroupArn": (
+ "arn:aws:elasticloadbalancing:us-east-1:123456789012:targetgroup/my-tg-58045486/5b231e04f663ae21"
+ ),
"Type": "forward",
}
]
@@ -38,7 +38,7 @@ one_action_two_tg = [
{
"TargetGroupArn": "arn:aws:elasticloadbalancing:us-east-1:123456789012:targetgroup/my-tg-dadf7b62/be2f50b4041f11ed",
"Weight": 1,
- }
+ },
],
},
"Type": "forward",
@@ -50,8 +50,7 @@ def _sort_actions_one_entry():
assert elbv2._sort_actions(one_action) == one_action
-class TestElBV2Utils():
-
+class TestElBV2Utils:
def setup_method(self):
self.connection = MagicMock(name="connection")
self.module = MagicMock(name="module")
@@ -70,93 +69,41 @@ class TestElBV2Utils():
"IpAddressType": "ipv4",
"VpcId": "vpc-3ac0fb5f",
"AvailabilityZones": [
- {
- "ZoneName": "us-west-2a",
- "SubnetId": "subnet-8360a9e7"
- },
- {
- "ZoneName": "us-west-2b",
- "SubnetId": "subnet-b7d581c0"
- }
+ {"ZoneName": "us-west-2a", "SubnetId": "subnet-8360a9e7"},
+ {"ZoneName": "us-west-2b", "SubnetId": "subnet-b7d581c0"},
],
"CreatedTime": "2016-03-25T21:26:12.920Z",
"CanonicalHostedZoneId": "Z2P70J7EXAMPLE",
"DNSName": "my-load-balancer-424835706.us-west-2.elb.amazonaws.com",
- "SecurityGroups": [
- "sg-5943793c"
- ],
+ "SecurityGroups": ["sg-5943793c"],
"LoadBalancerName": "my-load-balancer",
- "State": {
- "Code": "active"
- },
- "LoadBalancerArn": "arn:aws:elasticloadbalancing:us-west-2:123456789012:loadbalancer/app/my-load-balancer/50dc6c495c0c9188"
- }
- self.paginate.build_full_result.return_value = {
- 'LoadBalancers': [self.loadbalancer]
+ "State": {"Code": "active"},
+ "LoadBalancerArn": (
+ "arn:aws:elasticloadbalancing:us-west-2:123456789012:loadbalancer/app/my-load-balancer/50dc6c495c0c9188"
+ ),
}
+ self.paginate.build_full_result.return_value = {"LoadBalancers": [self.loadbalancer]}
self.connection.describe_load_balancer_attributes.return_value = {
"Attributes": [
- {
- "Value": "false",
- "Key": "access_logs.s3.enabled"
- },
- {
- "Value": "",
- "Key": "access_logs.s3.bucket"
- },
- {
- "Value": "",
- "Key": "access_logs.s3.prefix"
- },
- {
- "Value": "60",
- "Key": "idle_timeout.timeout_seconds"
- },
- {
- "Value": "false",
- "Key": "deletion_protection.enabled"
- },
- {
- "Value": "true",
- "Key": "routing.http2.enabled"
- },
- {
- "Value": "defensive",
- "Key": "routing.http.desync_mitigation_mode"
- },
- {
- "Value": "true",
- "Key": "routing.http.drop_invalid_header_fields.enabled"
- },
- {
- "Value": "true",
- "Key": "routing.http.x_amzn_tls_version_and_cipher_suite.enabled"
- },
- {
- "Value": "true",
- "Key": "routing.http.xff_client_port.enabled"
- },
- {
- "Value": "true",
- "Key": "waf.fail_open.enabled"
- },
+ {"Value": "false", "Key": "access_logs.s3.enabled"},
+ {"Value": "", "Key": "access_logs.s3.bucket"},
+ {"Value": "", "Key": "access_logs.s3.prefix"},
+ {"Value": "60", "Key": "idle_timeout.timeout_seconds"},
+ {"Value": "false", "Key": "deletion_protection.enabled"},
+ {"Value": "true", "Key": "routing.http2.enabled"},
+ {"Value": "defensive", "Key": "routing.http.desync_mitigation_mode"},
+ {"Value": "true", "Key": "routing.http.drop_invalid_header_fields.enabled"},
+ {"Value": "true", "Key": "routing.http.x_amzn_tls_version_and_cipher_suite.enabled"},
+ {"Value": "true", "Key": "routing.http.xff_client_port.enabled"},
+ {"Value": "true", "Key": "waf.fail_open.enabled"},
]
}
self.connection.describe_tags.return_value = {
"TagDescriptions": [
{
"ResourceArn": "arn:aws:elasticloadbalancing:us-west-2:123456789012:loadbalancer/app/my-load-balancer/50dc6c495c0c9188",
- "Tags": [
- {
- "Value": "ansible",
- "Key": "project"
- },
- {
- "Value": "RedHat",
- "Key": "company"
- }
- ]
+ "Tags": [{"Value": "ansible", "Key": "project"}, {"Value": "RedHat", "Key": "company"}],
}
]
}
@@ -172,7 +119,7 @@ class TestElBV2Utils():
self.connection.describe_tags.assert_called_once()
self.conn_paginator.paginate.assert_called_once()
# assert we got the expected value
- assert return_value == 'ipv4'
+ assert return_value == "ipv4"
# Test modify_ip_address_type idempotency
def test_modify_ip_address_type_idempotency(self):
@@ -206,7 +153,7 @@ class TestElBV2Utils():
"routing_http_drop_invalid_header_fields_enabled": "true",
"routing_http_x_amzn_tls_version_and_cipher_suite_enabled": "true",
"routing_http_xff_client_port_enabled": "true",
- "waf_fail_open_enabled": "true"
+ "waf_fail_open_enabled": "true",
}
# Run module
actual_elb_attributes = self.elbv2obj.get_elb_attributes()
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/test_iam.py b/ansible_collections/amazon/aws/tests/unit/module_utils/test_get_aws_account_id.py
index 4ce430262..c91073288 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/test_iam.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/test_get_aws_account_id.py
@@ -4,8 +4,7 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from unittest.mock import MagicMock
import pytest
@@ -15,8 +14,6 @@ except ImportError:
# Handled by HAS_BOTO3
pass
-from ansible_collections.amazon.aws.tests.unit.compat.mock import MagicMock
-
import ansible_collections.amazon.aws.plugins.module_utils.iam as utils_iam
from ansible_collections.amazon.aws.plugins.module_utils.ec2 import HAS_BOTO3
@@ -24,62 +21,64 @@ if not HAS_BOTO3:
pytestmark = pytest.mark.skip("test_iam.py requires the python modules 'boto3' and 'botocore'")
-class TestIamUtils():
-
+class TestIamUtils:
def _make_denied_exception(self, partition):
return botocore.exceptions.ClientError(
{
"Error": {
"Code": "AccessDenied",
- "Message": "User: arn:" + partition + ":iam::123456789012:user/ExampleUser "
- + "is not authorized to perform: iam:GetUser on resource: user ExampleUser"
+ "Message": (
+ "User: arn:"
+ + partition
+ + ":iam::123456789012:user/ExampleUser "
+ + "is not authorized to perform: iam:GetUser on resource: user ExampleUser"
+ ),
},
- "ResponseMetadata": {
- "RequestId": "01234567-89ab-cdef-0123-456789abcdef"
- }
- }, 'getUser')
+ "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
+ },
+ "getUser",
+ )
def _make_unexpected_exception(self):
return botocore.exceptions.ClientError(
{
- "Error": {
- "Code": "SomeThingWentWrong",
- "Message": "Boom!"
- },
- "ResponseMetadata": {
- "RequestId": "01234567-89ab-cdef-0123-456789abcdef"
- }
- }, 'someCall')
+ "Error": {"Code": "SomeThingWentWrong", "Message": "Boom!"},
+ "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
+ },
+ "someCall",
+ )
def _make_encoded_exception(self):
return botocore.exceptions.ClientError(
{
"Error": {
"Code": "AccessDenied",
- "Message": "You are not authorized to perform this operation. Encoded authorization failure message: " +
- "fEwXX6llx3cClm9J4pURgz1XPnJPrYexEbrJcLhFkwygMdOgx_-aEsj0LqRM6Kxt2HVI6prUhDwbJqBo9U2V7iRKZ" +
- "T6ZdJvHH02cXmD0Jwl5vrTsf0PhBcWYlH5wl2qME7xTfdolEUr4CzumCiti7ETiO-RDdHqWlasBOW5bWsZ4GSpPdU" +
- "06YAX0TfwVBs48uU5RpCHfz1uhSzez-3elbtp9CmTOHLt5pzJodiovccO55BQKYLPtmJcs6S9YLEEogmpI4Cb1D26" +
- "fYahDh51jEmaohPnW5pb1nQe2yPEtuIhtRzNjhFCOOMwY5DBzNsymK-Gj6eJLm7FSGHee4AHLU_XmZMe_6bcLAiOx" +
- "6Zdl65Kdd0hLcpwVxyZMi27HnYjAdqRlV3wuCW2PkhAW14qZQLfiuHZDEwnPe2PBGSlFcCmkQvJvX-YLoA7Uyc2wf" +
- "NX5RJm38STwfiJSkQaNDhHKTWKiLOsgY4Gze6uZoG7zOcFXFRyaA4cbMmI76uyBO7j-9uQUCtBYqYto8x_9CUJcxI" +
- "VC5SPG_C1mk-WoDMew01f0qy-bNaCgmJ9TOQGd08FyuT1SaMpCC0gX6mHuOnEgkFw3veBIowMpp9XcM-yc42fmIOp" +
- "FOdvQO6uE9p55Qc-uXvsDTTvT3A7EeFU8a_YoAIt9UgNYM6VTvoprLz7dBI_P6C-bdPPZCY2amm-dJNVZelT6TbJB" +
- "H_Vxh0fzeiSUBersy_QzB0moc-vPWgnB-IkgnYLV-4L3K0L2"
+ "Message": (
+ "You are not authorized to perform this operation. Encoded authorization failure message: "
+ + "fEwXX6llx3cClm9J4pURgz1XPnJPrYexEbrJcLhFkwygMdOgx_-aEsj0LqRM6Kxt2HVI6prUhDwbJqBo9U2V7iRKZ"
+ + "T6ZdJvHH02cXmD0Jwl5vrTsf0PhBcWYlH5wl2qME7xTfdolEUr4CzumCiti7ETiO-RDdHqWlasBOW5bWsZ4GSpPdU"
+ + "06YAX0TfwVBs48uU5RpCHfz1uhSzez-3elbtp9CmTOHLt5pzJodiovccO55BQKYLPtmJcs6S9YLEEogmpI4Cb1D26"
+ + "fYahDh51jEmaohPnW5pb1nQe2yPEtuIhtRzNjhFCOOMwY5DBzNsymK-Gj6eJLm7FSGHee4AHLU_XmZMe_6bcLAiOx"
+ + "6Zdl65Kdd0hLcpwVxyZMi27HnYjAdqRlV3wuCW2PkhAW14qZQLfiuHZDEwnPe2PBGSlFcCmkQvJvX-YLoA7Uyc2wf"
+ + "NX5RJm38STwfiJSkQaNDhHKTWKiLOsgY4Gze6uZoG7zOcFXFRyaA4cbMmI76uyBO7j-9uQUCtBYqYto8x_9CUJcxI"
+ + "VC5SPG_C1mk-WoDMew01f0qy-bNaCgmJ9TOQGd08FyuT1SaMpCC0gX6mHuOnEgkFw3veBIowMpp9XcM-yc42fmIOp"
+ + "FOdvQO6uE9p55Qc-uXvsDTTvT3A7EeFU8a_YoAIt9UgNYM6VTvoprLz7dBI_P6C-bdPPZCY2amm-dJNVZelT6TbJB"
+ + "H_Vxh0fzeiSUBersy_QzB0moc-vPWgnB-IkgnYLV-4L3K0L2"
+ ),
},
- "ResponseMetadata": {
- "RequestId": "01234567-89ab-cdef-0123-456789abcdef"
- }
- }, 'someCall')
+ "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
+ },
+ "someCall",
+ )
def _make_botocore_exception(self):
- return botocore.exceptions.EndpointConnectionError(endpoint_url='junk.endpoint')
+ return botocore.exceptions.EndpointConnectionError(endpoint_url="junk.endpoint")
def setup_method(self):
self.sts_client = MagicMock()
self.iam_client = MagicMock()
self.module = MagicMock()
- clients = {'sts': self.sts_client, 'iam': self.iam_client}
+ clients = {"sts": self.sts_client, "iam": self.iam_client}
def get_client(*args, **kwargs):
return clients[args[0]]
@@ -95,74 +94,103 @@ class TestIamUtils():
# Test the simplest case - We're permitted to call GetCallerIdentity
def test_get_aws_account_id__caller_success(self):
# Prepare
- self.sts_client.get_caller_identity.side_effect = [{'UserId': 'AIDA12345EXAMPLE54321',
- 'Account': '123456789012',
- 'Arn': 'arn:aws:iam::123456789012:user/ExampleUser'}]
+ self.sts_client.get_caller_identity.side_effect = [
+ {
+ "UserId": "AIDA12345EXAMPLE54321",
+ "Account": "123456789012",
+ "Arn": "arn:aws:iam::123456789012:user/ExampleUser",
+ }
+ ]
# Run module
return_value = utils_iam.get_aws_account_id(self.module)
# Check we only saw the calls we mocked out
self.module.client.assert_called_once()
self.sts_client.get_caller_identity.assert_called_once()
# Check we got the values back we expected.
- assert return_value == '123456789012'
+ assert return_value == "123456789012"
# Test the simplest case - We're permitted to call GetCallerIdentity
# (China partition)
def test_get_aws_account_id__caller_success_cn(self):
# Prepare
- self.sts_client.get_caller_identity.side_effect = [{'UserId': 'AIDA12345EXAMPLE54321',
- 'Account': '123456789012',
- 'Arn': 'arn:aws-cn:iam::123456789012:user/ExampleUser'}]
+ self.sts_client.get_caller_identity.side_effect = [
+ {
+ "UserId": "AIDA12345EXAMPLE54321",
+ "Account": "123456789012",
+ "Arn": "arn:aws-cn:iam::123456789012:user/ExampleUser",
+ }
+ ]
# Run module
return_value = utils_iam.get_aws_account_id(self.module)
# Check we only saw the calls we mocked out
self.module.client.assert_called_once()
self.sts_client.get_caller_identity.assert_called_once()
# Check we got the values back we expected.
- assert return_value == '123456789012'
+ assert return_value == "123456789012"
# ========== get_aws_account_info ============
# Test the simplest case - We're permitted to call GetCallerIdentity
def test_get_aws_account_info__caller_success(self):
# Prepare
- self.sts_client.get_caller_identity.side_effect = [{'UserId': 'AIDA12345EXAMPLE54321',
- 'Account': '123456789012',
- 'Arn': 'arn:aws:iam::123456789012:user/ExampleUser'}]
+ self.sts_client.get_caller_identity.side_effect = [
+ {
+ "UserId": "AIDA12345EXAMPLE54321",
+ "Account": "123456789012",
+ "Arn": "arn:aws:iam::123456789012:user/ExampleUser",
+ }
+ ]
# Run module
return_value = utils_iam.get_aws_account_info(self.module)
# Check we only saw the calls we mocked out
self.module.client.assert_called_once()
self.sts_client.get_caller_identity.assert_called_once()
# Check we got the values back we expected.
- assert return_value == ('123456789012', 'aws',)
+ assert return_value == (
+ "123456789012",
+ "aws",
+ )
# (China partition)
def test_get_aws_account_info__caller_success_cn(self):
# Prepare
- self.sts_client.get_caller_identity.side_effect = [{'UserId': 'AIDA12345EXAMPLE54321',
- 'Account': '123456789012',
- 'Arn': 'arn:aws-cn:iam::123456789012:user/ExampleUser'}]
+ self.sts_client.get_caller_identity.side_effect = [
+ {
+ "UserId": "AIDA12345EXAMPLE54321",
+ "Account": "123456789012",
+ "Arn": "arn:aws-cn:iam::123456789012:user/ExampleUser",
+ }
+ ]
# Run module
return_value = utils_iam.get_aws_account_info(self.module)
# Check we only saw the calls we mocked out
self.module.client.assert_called_once()
self.sts_client.get_caller_identity.assert_called_once()
# Check we got the values back we expected.
- assert return_value == ('123456789012', 'aws-cn',)
+ assert return_value == (
+ "123456789012",
+ "aws-cn",
+ )
# (US-Gov partition)
def test_get_aws_account_info__caller_success_gov(self):
# Prepare
- self.sts_client.get_caller_identity.side_effect = [{'UserId': 'AIDA12345EXAMPLE54321',
- 'Account': '123456789012',
- 'Arn': 'arn:aws-us-gov:iam::123456789012:user/ExampleUser'}]
+ self.sts_client.get_caller_identity.side_effect = [
+ {
+ "UserId": "AIDA12345EXAMPLE54321",
+ "Account": "123456789012",
+ "Arn": "arn:aws-us-gov:iam::123456789012:user/ExampleUser",
+ }
+ ]
# Run module
return_value = utils_iam.get_aws_account_info(self.module)
# Check we only saw the calls we mocked out
self.module.client.assert_called_once()
self.sts_client.get_caller_identity.assert_called_once()
# Check we got the values back we expected.
- assert return_value == ('123456789012', 'aws-us-gov',)
+ assert return_value == (
+ "123456789012",
+ "aws-us-gov",
+ )
# If sts:get_caller_identity fails (most likely something wierd on the
# client side), then try a few extra options.
@@ -170,8 +198,17 @@ class TestIamUtils():
def test_get_aws_account_info__user_success(self):
# Prepare
self.sts_client.get_caller_identity.side_effect = [self._make_botocore_exception()]
- self.iam_client.get_user.side_effect = [{"User": {"Path": "/", "UserName": "ExampleUser", "UserId": "AIDA12345EXAMPLE54321",
- "Arn": "arn:aws:iam::123456789012:user/ExampleUser", "CreateDate": "2020-09-08T14:04:32Z"}}]
+ self.iam_client.get_user.side_effect = [
+ {
+ "User": {
+ "Path": "/",
+ "UserName": "ExampleUser",
+ "UserId": "AIDA12345EXAMPLE54321",
+ "Arn": "arn:aws:iam::123456789012:user/ExampleUser",
+ "CreateDate": "2020-09-08T14:04:32Z",
+ }
+ }
+ ]
# Run module
return_value = utils_iam.get_aws_account_info(self.module)
# Check we only saw the calls we mocked out
@@ -179,14 +216,26 @@ class TestIamUtils():
self.sts_client.get_caller_identity.assert_called_once()
self.iam_client.get_user.assert_called_once()
# Check we got the values back we expected.
- assert return_value == ('123456789012', 'aws',)
+ assert return_value == (
+ "123456789012",
+ "aws",
+ )
# (China partition)
def test_get_aws_account_info__user_success_cn(self):
# Prepare
self.sts_client.get_caller_identity.side_effect = [self._make_botocore_exception()]
- self.iam_client.get_user.side_effect = [{"User": {"Path": "/", "UserName": "ExampleUser", "UserId": "AIDA12345EXAMPLE54321",
- "Arn": "arn:aws-cn:iam::123456789012:user/ExampleUser", "CreateDate": "2020-09-08T14:04:32Z"}}]
+ self.iam_client.get_user.side_effect = [
+ {
+ "User": {
+ "Path": "/",
+ "UserName": "ExampleUser",
+ "UserId": "AIDA12345EXAMPLE54321",
+ "Arn": "arn:aws-cn:iam::123456789012:user/ExampleUser",
+ "CreateDate": "2020-09-08T14:04:32Z",
+ }
+ }
+ ]
# Run module
return_value = utils_iam.get_aws_account_info(self.module)
# Check we only saw the calls we mocked out
@@ -194,14 +243,26 @@ class TestIamUtils():
self.sts_client.get_caller_identity.assert_called_once()
self.iam_client.get_user.assert_called_once()
# Check we got the values back we expected.
- assert return_value == ('123456789012', 'aws-cn',)
+ assert return_value == (
+ "123456789012",
+ "aws-cn",
+ )
# (US-Gov partition)
def test_get_aws_account_info__user_success_gov(self):
# Prepare
self.sts_client.get_caller_identity.side_effect = [self._make_botocore_exception()]
- self.iam_client.get_user.side_effect = [{"User": {"Path": "/", "UserName": "ExampleUser", "UserId": "AIDA12345EXAMPLE54321",
- "Arn": "arn:aws-us-gov:iam::123456789012:user/ExampleUser", "CreateDate": "2020-09-08T14:04:32Z"}}]
+ self.iam_client.get_user.side_effect = [
+ {
+ "User": {
+ "Path": "/",
+ "UserName": "ExampleUser",
+ "UserId": "AIDA12345EXAMPLE54321",
+ "Arn": "arn:aws-us-gov:iam::123456789012:user/ExampleUser",
+ "CreateDate": "2020-09-08T14:04:32Z",
+ }
+ }
+ ]
# Run module
return_value = utils_iam.get_aws_account_info(self.module)
# Check we only saw the calls we mocked out
@@ -209,13 +270,16 @@ class TestIamUtils():
self.sts_client.get_caller_identity.assert_called_once()
self.iam_client.get_user.assert_called_once()
# Check we got the values back we expected.
- assert return_value == ('123456789012', 'aws-us-gov',)
+ assert return_value == (
+ "123456789012",
+ "aws-us-gov",
+ )
# Test response if STS and IAM fails and we need to fall back to the denial message
def test_get_aws_account_info__user_denied(self):
# Prepare
self.sts_client.get_caller_identity.side_effect = [self._make_botocore_exception()]
- self.iam_client.get_user.side_effect = [self._make_denied_exception('aws')]
+ self.iam_client.get_user.side_effect = [self._make_denied_exception("aws")]
# Run module
return_value = utils_iam.get_aws_account_info(self.module)
# Check we only saw the calls we mocked out
@@ -223,13 +287,16 @@ class TestIamUtils():
self.sts_client.get_caller_identity.assert_called_once()
self.iam_client.get_user.assert_called_once()
# Check we got the values back we expected.
- assert return_value == ('123456789012', 'aws',)
+ assert return_value == (
+ "123456789012",
+ "aws",
+ )
# (China partition)
def test_get_aws_account_info__user_denied_cn(self):
# Prepare
self.sts_client.get_caller_identity.side_effect = [self._make_botocore_exception()]
- self.iam_client.get_user.side_effect = [self._make_denied_exception('aws-cn')]
+ self.iam_client.get_user.side_effect = [self._make_denied_exception("aws-cn")]
# Run module
return_value = utils_iam.get_aws_account_info(self.module)
# Check we only saw the calls we mocked out
@@ -237,13 +304,16 @@ class TestIamUtils():
self.sts_client.get_caller_identity.assert_called_once()
self.iam_client.get_user.assert_called_once()
# Check we got the values back we expected.
- assert return_value == ('123456789012', 'aws-cn',)
+ assert return_value == (
+ "123456789012",
+ "aws-cn",
+ )
# (US-Gov partition)
def test_get_aws_account_info__user_denied_gov(self):
# Prepare
self.sts_client.get_caller_identity.side_effect = [self._make_botocore_exception()]
- self.iam_client.get_user.side_effect = [self._make_denied_exception('aws-us-gov')]
+ self.iam_client.get_user.side_effect = [self._make_denied_exception("aws-us-gov")]
# Run module
return_value = utils_iam.get_aws_account_info(self.module)
# Check we only saw the calls we mocked out
@@ -251,7 +321,10 @@ class TestIamUtils():
self.sts_client.get_caller_identity.assert_called_once()
self.iam_client.get_user.assert_called_once()
# Check we got the values back we expected.
- assert return_value == ('123456789012', 'aws-us-gov',)
+ assert return_value == (
+ "123456789012",
+ "aws-us-gov",
+ )
# Test that we fail gracefully if Boto throws exceptions at us...
def test_get_aws_account_info__boto_failures(self):
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/test_rds.py b/ansible_collections/amazon/aws/tests/unit/module_utils/test_rds.py
index 9d96d44a8..51a715151 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/test_rds.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/test_rds.py
@@ -3,10 +3,9 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
import sys
+from unittest.mock import MagicMock
+
import pytest
if sys.version_info < (3, 7):
@@ -20,8 +19,6 @@ except ImportError:
# Handled by HAS_BOTO3
pass
-from ansible_collections.amazon.aws.tests.unit.compat.mock import MagicMock
-
from ansible_collections.amazon.aws.plugins.module_utils import rds
from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
@@ -37,13 +34,11 @@ def error(*args, **kwargs):
return MagicMock(), pytest.raises(*args, **kwargs)
-def build_exception(
- operation_name, code=None, message=None, http_status_code=None, error=True
-):
+def build_exception(operation_name, code=None, message=None, http_status_code=None, error=True):
# Support skipping the test is botocore isn't installed
# (called by parametrize before skip is evaluated)
if not HAS_BOTO3:
- return Exception('MissingBotoCore')
+ return Exception("MissingBotoCore")
response = {}
if error or code or message:
response["Error"] = {}
@@ -74,9 +69,7 @@ def test__wait_for_cluster_snapshot_status(waiter_name):
"db_snapshot_available",
"Failed to wait for DB snapshot test to be available",
),
- (
- "db_snapshot_deleted",
- "Failed to wait for DB snapshot test to be deleted"),
+ ("db_snapshot_deleted", "Failed to wait for DB snapshot test to be deleted"),
],
)
def test__wait_for_instance_snapshot_status_failed(input, expected):
@@ -125,8 +118,8 @@ def test__wait_for_cluster_snapshot_status_failed(input, expected):
name="delete_db_cluster",
waiter="cluster_deleted",
operation_description="delete DB cluster",
- resource='cluster',
- retry_codes=['InvalidDBClusterState']
+ resource="cluster",
+ retry_codes=["InvalidDBClusterState"],
)
),
),
@@ -140,8 +133,38 @@ def test__wait_for_cluster_snapshot_status_failed(input, expected):
name="create_db_cluster",
waiter="cluster_available",
operation_description="create DB cluster",
- resource='cluster',
- retry_codes=['InvalidDBClusterState']
+ resource="cluster",
+ retry_codes=["InvalidDBClusterState"],
+ )
+ ),
+ ),
+ (
+ "start_db_cluster",
+ {
+ "new_db_cluster_identifier": "test",
+ },
+ *expected(
+ rds.Boto3ClientMethod(
+ name="start_db_cluster",
+ waiter="cluster_available",
+ operation_description="start DB cluster",
+ resource="cluster",
+ retry_codes=["InvalidDBClusterState"],
+ )
+ ),
+ ),
+ (
+ "stop_db_cluster",
+ {
+ "new_db_cluster_identifier": "test",
+ },
+ *expected(
+ rds.Boto3ClientMethod(
+ name="stop_db_cluster",
+ waiter="cluster_available",
+ operation_description="stop DB cluster",
+ resource="cluster",
+ retry_codes=["InvalidDBClusterState"],
)
),
),
@@ -155,8 +178,8 @@ def test__wait_for_cluster_snapshot_status_failed(input, expected):
name="restore_db_cluster_from_snapshot",
waiter="cluster_available",
operation_description="restore DB cluster from snapshot",
- resource='cluster',
- retry_codes=['InvalidDBClusterSnapshotState']
+ resource="cluster",
+ retry_codes=["InvalidDBClusterSnapshotState"],
)
),
),
@@ -170,8 +193,8 @@ def test__wait_for_cluster_snapshot_status_failed(input, expected):
name="modify_db_cluster",
waiter="cluster_available",
operation_description="modify DB cluster",
- resource='cluster',
- retry_codes=['InvalidDBClusterState']
+ resource="cluster",
+ retry_codes=["InvalidDBClusterState"],
)
),
),
@@ -185,34 +208,29 @@ def test__wait_for_cluster_snapshot_status_failed(input, expected):
name="list_tags_for_resource",
waiter="cluster_available",
operation_description="list tags for resource",
- resource='cluster',
- retry_codes=['InvalidDBClusterState']
+ resource="cluster",
+ retry_codes=["InvalidDBClusterState"],
)
),
),
(
"fake_method",
- {
- "wait": False
- },
+ {"wait": False},
*expected(
rds.Boto3ClientMethod(
- name="fake_method",
- waiter="",
- operation_description="fake method",
- resource='',
- retry_codes=[]
+ name="fake_method", waiter="", operation_description="fake method", resource="", retry_codes=[]
)
),
),
(
"fake_method",
- {
- "wait": True
- },
+ {"wait": True},
*error(
NotImplementedError,
- match="method fake_method hasn't been added to the list of accepted methods to use a waiter in module_utils/rds.py",
+ match=(
+ "method fake_method hasn't been added to the list of accepted methods to use a waiter in"
+ " module_utils/rds.py"
+ ),
),
),
],
@@ -237,8 +255,8 @@ def test__get_rds_method_attribute_cluster(method_name, params, expected, error)
name="delete_db_instance",
waiter="db_instance_deleted",
operation_description="delete DB instance",
- resource='instance',
- retry_codes=['InvalidDBInstanceState', 'InvalidDBSecurityGroupState']
+ resource="instance",
+ retry_codes=["InvalidDBInstanceState", "InvalidDBSecurityGroupState"],
)
),
),
@@ -252,8 +270,8 @@ def test__get_rds_method_attribute_cluster(method_name, params, expected, error)
name="create_db_instance",
waiter="db_instance_available",
operation_description="create DB instance",
- resource='instance',
- retry_codes=['InvalidDBInstanceState', 'InvalidDBSecurityGroupState']
+ resource="instance",
+ retry_codes=["InvalidDBInstanceState", "InvalidDBSecurityGroupState"],
)
),
),
@@ -267,8 +285,8 @@ def test__get_rds_method_attribute_cluster(method_name, params, expected, error)
name="stop_db_instance",
waiter="db_instance_stopped",
operation_description="stop DB instance",
- resource='instance',
- retry_codes=['InvalidDBInstanceState', 'InvalidDBSecurityGroupState']
+ resource="instance",
+ retry_codes=["InvalidDBInstanceState", "InvalidDBSecurityGroupState"],
)
),
),
@@ -282,8 +300,8 @@ def test__get_rds_method_attribute_cluster(method_name, params, expected, error)
name="promote_read_replica",
waiter="read_replica_promoted",
operation_description="promote read replica",
- resource='instance',
- retry_codes=['InvalidDBInstanceState', 'InvalidDBSecurityGroupState']
+ resource="instance",
+ retry_codes=["InvalidDBInstanceState", "InvalidDBSecurityGroupState"],
)
),
),
@@ -297,8 +315,8 @@ def test__get_rds_method_attribute_cluster(method_name, params, expected, error)
name="restore_db_instance_from_db_snapshot",
waiter="db_instance_available",
operation_description="restore DB instance from DB snapshot",
- resource='instance',
- retry_codes=['InvalidDBSnapshotState']
+ resource="instance",
+ retry_codes=["InvalidDBSnapshotState"],
)
),
),
@@ -312,8 +330,8 @@ def test__get_rds_method_attribute_cluster(method_name, params, expected, error)
name="modify_db_instance",
waiter="db_instance_available",
operation_description="modify DB instance",
- resource='instance',
- retry_codes=['InvalidDBInstanceState', 'InvalidDBSecurityGroupState']
+ resource="instance",
+ retry_codes=["InvalidDBInstanceState", "InvalidDBSecurityGroupState"],
)
),
),
@@ -327,8 +345,8 @@ def test__get_rds_method_attribute_cluster(method_name, params, expected, error)
name="add_role_to_db_instance",
waiter="role_associated",
operation_description="add role to DB instance",
- resource='instance',
- retry_codes=['InvalidDBInstanceState', 'InvalidDBSecurityGroupState']
+ resource="instance",
+ retry_codes=["InvalidDBInstanceState", "InvalidDBSecurityGroupState"],
)
),
),
@@ -342,8 +360,8 @@ def test__get_rds_method_attribute_cluster(method_name, params, expected, error)
name="remove_role_from_db_instance",
waiter="role_disassociated",
operation_description="remove role from DB instance",
- resource='instance',
- retry_codes=['InvalidDBInstanceState', 'InvalidDBSecurityGroupState']
+ resource="instance",
+ retry_codes=["InvalidDBInstanceState", "InvalidDBSecurityGroupState"],
)
),
),
@@ -357,34 +375,29 @@ def test__get_rds_method_attribute_cluster(method_name, params, expected, error)
name="list_tags_for_resource",
waiter="db_instance_available",
operation_description="list tags for resource",
- resource='instance',
- retry_codes=['InvalidDBInstanceState', 'InvalidDBSecurityGroupState']
+ resource="instance",
+ retry_codes=["InvalidDBInstanceState", "InvalidDBSecurityGroupState"],
)
),
),
(
"fake_method",
- {
- "wait": False
- },
+ {"wait": False},
*expected(
rds.Boto3ClientMethod(
- name="fake_method",
- waiter="",
- operation_description="fake method",
- resource='',
- retry_codes=[]
+ name="fake_method", waiter="", operation_description="fake method", resource="", retry_codes=[]
)
),
),
(
"fake_method",
- {
- "wait": True
- },
+ {"wait": True},
*error(
NotImplementedError,
- match="method fake_method hasn't been added to the list of accepted methods to use a waiter in module_utils/rds.py",
+ match=(
+ "method fake_method hasn't been added to the list of accepted methods to use a waiter in"
+ " module_utils/rds.py"
+ ),
),
),
],
@@ -409,8 +422,8 @@ def test__get_rds_method_attribute_instance(method_name, params, expected, error
name="delete_db_snapshot",
waiter="db_snapshot_deleted",
operation_description="delete DB snapshot",
- resource='instance_snapshot',
- retry_codes=['InvalidDBSnapshotState']
+ resource="instance_snapshot",
+ retry_codes=["InvalidDBSnapshotState"],
)
),
),
@@ -424,24 +437,21 @@ def test__get_rds_method_attribute_instance(method_name, params, expected, error
name="create_db_snapshot",
waiter="db_snapshot_available",
operation_description="create DB snapshot",
- resource='instance_snapshot',
- retry_codes=['InvalidDBInstanceState']
+ resource="instance_snapshot",
+ retry_codes=["InvalidDBInstanceState"],
)
),
),
(
"copy_db_snapshot",
- {
- "source_db_snapshot_identifier": "test",
- "db_snapshot_identifier": "test-copy"
- },
+ {"source_db_snapshot_identifier": "test", "db_snapshot_identifier": "test-copy"},
*expected(
rds.Boto3ClientMethod(
name="copy_db_snapshot",
waiter="db_snapshot_available",
operation_description="copy DB snapshot",
- resource='instance_snapshot',
- retry_codes=['InvalidDBSnapshotState']
+ resource="instance_snapshot",
+ retry_codes=["InvalidDBSnapshotState"],
)
),
),
@@ -455,8 +465,8 @@ def test__get_rds_method_attribute_instance(method_name, params, expected, error
name="list_tags_for_resource",
waiter="db_snapshot_available",
operation_description="list tags for resource",
- resource='instance_snapshot',
- retry_codes=['InvalidDBSnapshotState']
+ resource="instance_snapshot",
+ retry_codes=["InvalidDBSnapshotState"],
)
),
),
@@ -470,8 +480,8 @@ def test__get_rds_method_attribute_instance(method_name, params, expected, error
name="delete_db_cluster_snapshot",
waiter="db_cluster_snapshot_deleted",
operation_description="delete DB cluster snapshot",
- resource='cluster_snapshot',
- retry_codes=['InvalidDBClusterSnapshotState']
+ resource="cluster_snapshot",
+ retry_codes=["InvalidDBClusterSnapshotState"],
)
),
),
@@ -485,24 +495,21 @@ def test__get_rds_method_attribute_instance(method_name, params, expected, error
name="create_db_cluster_snapshot",
waiter="db_cluster_snapshot_available",
operation_description="create DB cluster snapshot",
- resource='cluster_snapshot',
- retry_codes=['InvalidDBClusterState']
+ resource="cluster_snapshot",
+ retry_codes=["InvalidDBClusterState"],
)
),
),
(
"copy_db_cluster_snapshot",
- {
- "source_db_cluster_snapshot_identifier": "test",
- "db_cluster_snapshot_identifier": "test-copy"
- },
+ {"source_db_cluster_snapshot_identifier": "test", "db_cluster_snapshot_identifier": "test-copy"},
*expected(
rds.Boto3ClientMethod(
name="copy_db_cluster_snapshot",
waiter="db_cluster_snapshot_available",
operation_description="copy DB cluster snapshot",
- resource='cluster_snapshot',
- retry_codes=['InvalidDBClusterSnapshotState']
+ resource="cluster_snapshot",
+ retry_codes=["InvalidDBClusterSnapshotState"],
)
),
),
@@ -516,34 +523,29 @@ def test__get_rds_method_attribute_instance(method_name, params, expected, error
name="list_tags_for_resource",
waiter="db_cluster_snapshot_available",
operation_description="list tags for resource",
- resource='cluster_snapshot',
- retry_codes=['InvalidDBClusterSnapshotState']
+ resource="cluster_snapshot",
+ retry_codes=["InvalidDBClusterSnapshotState"],
)
),
),
(
"fake_method",
- {
- "wait": False
- },
+ {"wait": False},
*expected(
rds.Boto3ClientMethod(
- name="fake_method",
- waiter="",
- operation_description="fake method",
- resource='',
- retry_codes=[]
+ name="fake_method", waiter="", operation_description="fake method", resource="", retry_codes=[]
)
),
),
(
"fake_method",
- {
- "wait": True
- },
+ {"wait": True},
*error(
NotImplementedError,
- match="method fake_method hasn't been added to the list of accepted methods to use a waiter in module_utils/rds.py",
+ match=(
+ "method fake_method hasn't been added to the list of accepted methods to use a waiter in"
+ " module_utils/rds.py"
+ ),
),
),
],
@@ -558,19 +560,10 @@ def test__get_rds_method_attribute_snapshot(method_name, params, expected, error
@pytest.mark.parametrize(
"method_name, params, expected",
[
+ ("create_db_snapshot", {"db_snapshot_identifier": "test"}, "test"),
(
"create_db_snapshot",
- {
- "db_snapshot_identifier": "test"
- },
- "test"
- ),
- (
- "create_db_snapshot",
- {
- "db_snapshot_identifier": "test",
- "apply_immediately": True
- },
+ {"db_snapshot_identifier": "test", "apply_immediately": True},
"test",
),
(
@@ -583,10 +576,7 @@ def test__get_rds_method_attribute_snapshot(method_name, params, expected, error
),
(
"create_db_snapshot",
- {
- "db_snapshot_identifier": "test",
- "apply_immediately": True
- },
+ {"db_snapshot_identifier": "test", "apply_immediately": True},
"test",
),
(
@@ -608,10 +598,7 @@ def test__get_rds_method_attribute_snapshot(method_name, params, expected, error
),
(
"create_db_snapshot",
- {
- "db_snapshot_identifier": "test",
- "apply_immediately": True
- },
+ {"db_snapshot_identifier": "test", "apply_immediately": True},
"test",
),
(
@@ -680,7 +667,8 @@ def test__handle_errors(method_name, exception, expected):
message="ModifyDbCluster API",
),
*expected(
- "It appears you are trying to modify attributes that are managed at the cluster level. Please see rds_cluster"
+ "It appears you are trying to modify attributes that are managed at the cluster level. Please see"
+ " rds_cluster"
),
),
(
@@ -688,7 +676,10 @@ def test__handle_errors(method_name, exception, expected):
build_exception("modify_db_instance", code="InvalidParameterCombination"),
*error(
NotImplementedError,
- match="method modify_db_instance hasn't been added to the list of accepted methods to use a waiter in module_utils/rds.py",
+ match=(
+ "method modify_db_instance hasn't been added to the list of accepted methods to use a waiter in"
+ " module_utils/rds.py"
+ ),
),
),
(
@@ -696,25 +687,27 @@ def test__handle_errors(method_name, exception, expected):
build_exception("promote_read_replica", code="InvalidDBInstanceState"),
*error(
NotImplementedError,
- match="method promote_read_replica hasn't been added to the list of accepted methods to use a waiter in module_utils/rds.py",
+ match=(
+ "method promote_read_replica hasn't been added to the list of accepted methods to use a waiter in"
+ " module_utils/rds.py"
+ ),
),
),
(
"promote_read_replica_db_cluster",
- build_exception(
- "promote_read_replica_db_cluster", code="InvalidDBClusterStateFault"
- ),
+ build_exception("promote_read_replica_db_cluster", code="InvalidDBClusterStateFault"),
*error(
NotImplementedError,
- match="method promote_read_replica_db_cluster hasn't been added to the list of accepted methods to use a waiter in module_utils/rds.py",
+ match=(
+ "method promote_read_replica_db_cluster hasn't been added to the list of accepted methods to use a"
+ " waiter in module_utils/rds.py"
+ ),
),
),
(
"create_db_cluster",
build_exception("create_db_cluster", code="InvalidParameterValue"),
- *expected(
- "DB engine fake_engine should be one of aurora, aurora-mysql, aurora-postgresql"
- ),
+ *expected("DB engine fake_engine should be one of aurora, aurora-mysql, aurora-postgresql"),
),
],
)
@@ -727,25 +720,15 @@ def test__handle_errors_failed(method_name, exception, expected, error):
module.fail_json_aws.call_args[1]["msg"] == expected
-class TestRdsUtils():
-
+class TestRdsUtils:
# ========================================================
# Setup some initial data that we can use within our tests
# ========================================================
def setup_method(self):
self.target_role_list = [
- {
- 'role_arn': 'role_won',
- 'feature_name': 's3Export'
- },
- {
- 'role_arn': 'role_too',
- 'feature_name': 'Lambda'
- },
- {
- 'role_arn': 'role_thrie',
- 'feature_name': 's3Import'
- }
+ {"role_arn": "role_won", "feature_name": "s3Export"},
+ {"role_arn": "role_too", "feature_name": "Lambda"},
+ {"role_arn": "role_thrie", "feature_name": "s3Import"},
]
# ========================================================
@@ -779,11 +762,7 @@ class TestRdsUtils():
assert self.target_role_list == roles_to_delete
def test_compare_iam_roles_different(self):
- existing_list = [
- {
- 'role_arn': 'role_wonn',
- 'feature_name': 's3Export'
- }]
+ existing_list = [{"role_arn": "role_wonn", "feature_name": "s3Export"}]
roles_to_add, roles_to_delete = rds.compare_iam_roles(existing_list, self.target_role_list, purge_roles=False)
assert self.target_role_list == roles_to_add
assert [] == roles_to_delete
@@ -792,11 +771,7 @@ class TestRdsUtils():
assert existing_list == roles_to_delete
existing_list = self.target_role_list.copy()
- self.target_role_list = [
- {
- 'role_arn': 'role_wonn',
- 'feature_name': 's3Export'
- }]
+ self.target_role_list = [{"role_arn": "role_wonn", "feature_name": "s3Export"}]
roles_to_add, roles_to_delete = rds.compare_iam_roles(existing_list, self.target_role_list, purge_roles=False)
assert self.target_role_list == roles_to_add
assert [] == roles_to_delete
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/test_s3.py b/ansible_collections/amazon/aws/tests/unit/module_utils/test_s3.py
index 42c8ecfd0..3770064c5 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/test_s3.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/test_s3.py
@@ -4,83 +4,292 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
+import random
+import string
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import patch
-__metaclass__ = type
+import pytest
-from ansible_collections.amazon.aws.tests.unit.compat.mock import MagicMock
from ansible_collections.amazon.aws.plugins.module_utils import s3
-from ansible.module_utils.basic import AnsibleModule
-import pytest
+try:
+ import botocore
+except ImportError:
+ pass
+
+
+def generate_random_string(size, include_digits=True):
+ buffer = string.ascii_lowercase
+ if include_digits:
+ buffer += string.digits
+
+ return "".join(random.choice(buffer) for i in range(size))
+
+
+@pytest.mark.parametrize("parts", range(0, 10, 3))
+@pytest.mark.parametrize("version", [True, False])
+def test_s3_head_objects(parts, version):
+ client = MagicMock()
+
+ s3bucket_name = f"s3-bucket-{generate_random_string(8, False)}"
+ s3bucket_object = f"s3-bucket-object-{generate_random_string(8, False)}"
+ versionId = None
+ if version:
+ versionId = random.randint(0, 1000)
+
+ total = 0
+ for head in s3.s3_head_objects(client, parts, s3bucket_name, s3bucket_object, versionId):
+ assert head == client.head_object.return_value
+ total += 1
+
+ assert total == parts
+ params = {"Bucket": s3bucket_name, "Key": s3bucket_object}
+ if versionId:
+ params["VersionId"] = versionId
+
+ api_calls = [call(PartNumber=i, **params) for i in range(1, parts + 1)]
+ client.head_object.assert_has_calls(api_calls, any_order=True)
+
+
+def raise_botoclient_exception():
+ params = {
+ "Error": {"Code": 1, "Message": "Something went wrong"},
+ "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
+ }
+ return botocore.exceptions.ClientError(params, "some_called_method")
+
+
+@pytest.mark.parametrize("use_file", [False, True])
+@pytest.mark.parametrize("parts", range(0, 10, 3))
+@patch("ansible_collections.amazon.aws.plugins.module_utils.s3.md5")
+@patch("ansible_collections.amazon.aws.plugins.module_utils.s3.s3_head_objects")
+def test_calculate_checksum(m_s3_head_objects, m_s3_md5, use_file, parts, tmp_path):
+ client = MagicMock()
+ mock_md5 = m_s3_md5.return_value
+
+ mock_md5.digest.return_value = b"1"
+ mock_md5.hexdigest.return_value = "".join(["f" for i in range(32)])
+
+ m_s3_head_objects.return_value = [{"ContentLength": f"{int(i + 1)}"} for i in range(parts)]
+
+ content = b'"f20e84ac3d0c33cea77b3f29e3323a09"'
+ test_function = s3.calculate_checksum_with_content
+ if use_file:
+ test_function = s3.calculate_checksum_with_file
+ test_dir = tmp_path / "test_s3"
+ test_dir.mkdir()
+ etag_file = test_dir / "etag.bin"
+ etag_file.write_bytes(content)
+
+ content = str(etag_file)
+
+ s3bucket_name = f"s3-bucket-{generate_random_string(8, False)}"
+ s3bucket_object = f"s3-bucket-object-{generate_random_string(8, False)}"
+ version = random.randint(0, 1000)
+
+ result = test_function(client, parts, s3bucket_name, s3bucket_object, version, content)
+
+ expected = f'"{mock_md5.hexdigest.return_value}-{parts}"'
+ assert result == expected
+
+ mock_md5.digest.assert_has_calls([call() for i in range(parts)])
+ mock_md5.hexdigest.assert_called_once()
+
+ m_s3_head_objects.assert_called_once_with(client, parts, s3bucket_name, s3bucket_object, version)
+
+@pytest.mark.parametrize("etag_multipart", [True, False])
+@patch("ansible_collections.amazon.aws.plugins.module_utils.s3.calculate_checksum_with_file")
+def test_calculate_etag(m_checksum_file, etag_multipart):
+ module = MagicMock()
+ client = MagicMock()
+
+ module.fail_json_aws.side_effect = SystemExit(2)
+ module.md5.return_value = generate_random_string(32)
+
+ s3bucket_name = f"s3-bucket-{generate_random_string(8, False)}"
+ s3bucket_object = f"s3-bucket-object-{generate_random_string(8, False)}"
+ version = random.randint(0, 1000)
+ parts = 3
+
+ etag = '"f20e84ac3d0c33cea77b3f29e3323a09"'
+ digest = '"9aa254f7f76fd14435b21e9448525b99"'
-class FakeAnsibleModule(AnsibleModule):
- def __init__(self):
- pass
+ file_name = generate_random_string(32)
+ if not etag_multipart:
+ result = s3.calculate_etag(module, file_name, etag, client, s3bucket_name, s3bucket_object, version)
+ assert result == f'"{module.md5.return_value}"'
+ module.md5.assert_called_once_with(file_name)
+ else:
+ etag = f'"f20e84ac3d0c33cea77b3f29e3323a09-{parts}"'
+ m_checksum_file.return_value = digest
+ assert digest == s3.calculate_etag(module, file_name, etag, client, s3bucket_name, s3bucket_object, version)
-def test_calculate_etag_single_part(tmp_path_factory):
- module = FakeAnsibleModule()
- my_image = tmp_path_factory.mktemp("data") / "my.txt"
- my_image.write_text("Hello World!")
+ m_checksum_file.assert_called_with(client, parts, s3bucket_name, s3bucket_object, version, file_name)
+
+
+@pytest.mark.parametrize("etag_multipart", [True, False])
+@patch("ansible_collections.amazon.aws.plugins.module_utils.s3.calculate_checksum_with_content")
+def test_calculate_etag_content(m_checksum_content, etag_multipart):
+ module = MagicMock()
+ client = MagicMock()
- etag = s3.calculate_etag(
- module, str(my_image), etag="", s3=None, bucket=None, obj=None
- )
- assert etag == '"ed076287532e86365e841e92bfc50d8c"'
+ module.fail_json_aws.side_effect = SystemExit(2)
+ s3bucket_name = f"s3-bucket-{generate_random_string(8, False)}"
+ s3bucket_object = f"s3-bucket-object-{generate_random_string(8, False)}"
+ version = random.randint(0, 1000)
+ parts = 3
-def test_calculate_etag_multi_part(tmp_path_factory):
- module = FakeAnsibleModule()
- my_image = tmp_path_factory.mktemp("data") / "my.txt"
- my_image.write_text("Hello World!" * 1000)
+ etag = '"f20e84ac3d0c33cea77b3f29e3323a09"'
+ content = b'"f20e84ac3d0c33cea77b3f29e3323a09"'
+ digest = '"9aa254f7f76fd14435b21e9448525b99"'
- mocked_s3 = MagicMock()
- mocked_s3.head_object.side_effect = [{"ContentLength": "1000"} for _i in range(12)]
+ if not etag_multipart:
+ assert digest == s3.calculate_etag_content(
+ module, content, etag, client, s3bucket_name, s3bucket_object, version
+ )
+ else:
+ etag = f'"f20e84ac3d0c33cea77b3f29e3323a09-{parts}"'
+ m_checksum_content.return_value = digest
+ result = s3.calculate_etag_content(module, content, etag, client, s3bucket_name, s3bucket_object, version)
+ assert result == digest
- etag = s3.calculate_etag(
- module,
- str(my_image),
- etag='"f20e84ac3d0c33cea77b3f29e3323a09-12"',
- s3=mocked_s3,
- bucket="my-bucket",
- obj="my-obj",
- )
- assert etag == '"f20e84ac3d0c33cea77b3f29e3323a09-12"'
- mocked_s3.head_object.assert_called_with(
- Bucket="my-bucket", Key="my-obj", PartNumber=12
- )
+ m_checksum_content.assert_called_with(client, parts, s3bucket_name, s3bucket_object, version, content)
-def test_validate_bucket_name():
+@pytest.mark.parametrize("using_file", [True, False])
+@patch("ansible_collections.amazon.aws.plugins.module_utils.s3.calculate_checksum_with_content")
+@patch("ansible_collections.amazon.aws.plugins.module_utils.s3.calculate_checksum_with_file")
+def test_calculate_etag_failure(m_checksum_file, m_checksum_content, using_file):
module = MagicMock()
+ client = MagicMock()
+
+ module.fail_json_aws.side_effect = SystemExit(2)
+
+ s3bucket_name = f"s3-bucket-{generate_random_string(8, False)}"
+ s3bucket_object = f"s3-bucket-object-{generate_random_string(8, False)}"
+ version = random.randint(0, 1000)
+ parts = 3
+
+ etag = f'"f20e84ac3d0c33cea77b3f29e3323a09-{parts}"'
+ content = "some content or file name"
+
+ if using_file:
+ test_method = s3.calculate_etag
+ m_checksum_file.side_effect = raise_botoclient_exception()
+ else:
+ test_method = s3.calculate_etag_content
+ m_checksum_content.side_effect = raise_botoclient_exception()
+
+ with pytest.raises(SystemExit):
+ test_method(module, content, etag, client, s3bucket_name, s3bucket_object, version)
+ module.fail_json_aws.assert_called()
+
+
+@pytest.mark.parametrize(
+ "bucket_name,result",
+ [
+ ("docexamplebucket1", None),
+ ("log-delivery-march-2020", None),
+ ("my-hosted-content", None),
+ ("docexamplewebsite.com", None),
+ ("www.docexamplewebsite.com", None),
+ ("my.example.s3.bucket", None),
+ ("doc", None),
+ ("doc_example_bucket", "invalid character(s) found in the bucket name"),
+ ("DocExampleBucket", "invalid character(s) found in the bucket name"),
+ ("doc-example-bucket-", "bucket names must begin and end with a letter or number"),
+ (
+ "this.string.has.more.than.63.characters.so.it.should.not.passed.the.validated",
+ "the length of an S3 bucket cannot exceed 63 characters",
+ ),
+ ("my", "the length of an S3 bucket must be at least 3 characters"),
+ ],
+)
+def test_validate_bucket_name(bucket_name, result):
+ assert result == s3.validate_bucket_name(bucket_name)
+
+
+mod_urlparse = "ansible_collections.amazon.aws.plugins.module_utils.s3.urlparse"
+
+
+class UrlInfo:
+ def __init__(self, scheme=None, hostname=None, port=None):
+ self.hostname = hostname
+ self.scheme = scheme
+ self.port = port
+
+
+@patch(mod_urlparse)
+def test_is_fakes3_with_none_arg(m_urlparse):
+ m_urlparse.side_effect = SystemExit(1)
+ result = s3.is_fakes3(None)
+ assert not result
+ m_urlparse.assert_not_called()
+
+
+@pytest.mark.parametrize(
+ "url,scheme,result",
+ [
+ ("https://test-s3.amazon.com", "https", False),
+ ("fakes3://test-s3.amazon.com", "fakes3", True),
+ ("fakes3s://test-s3.amazon.com", "fakes3s", True),
+ ],
+)
+@patch(mod_urlparse)
+def test_is_fakes3(m_urlparse, url, scheme, result):
+ m_urlparse.return_value = UrlInfo(scheme=scheme)
+ assert result == s3.is_fakes3(url)
+ m_urlparse.assert_called_with(url)
+
+
+@pytest.mark.parametrize(
+ "url,urlinfo,endpoint",
+ [
+ (
+ "fakes3://test-s3.amazon.com",
+ {"scheme": "fakes3", "hostname": "test-s3.amazon.com"},
+ {"endpoint": "http://test-s3.amazon.com:80", "use_ssl": False},
+ ),
+ (
+ "fakes3://test-s3.amazon.com:8080",
+ {"scheme": "fakes3", "hostname": "test-s3.amazon.com", "port": 8080},
+ {"endpoint": "http://test-s3.amazon.com:8080", "use_ssl": False},
+ ),
+ (
+ "fakes3s://test-s3.amazon.com",
+ {"scheme": "fakes3s", "hostname": "test-s3.amazon.com"},
+ {"endpoint": "https://test-s3.amazon.com:443", "use_ssl": True},
+ ),
+ (
+ "fakes3s://test-s3.amazon.com:9096",
+ {"scheme": "fakes3s", "hostname": "test-s3.amazon.com", "port": 9096},
+ {"endpoint": "https://test-s3.amazon.com:9096", "use_ssl": True},
+ ),
+ ],
+)
+@patch(mod_urlparse)
+def test_parse_fakes3_endpoint(m_urlparse, url, urlinfo, endpoint):
+ m_urlparse.return_value = UrlInfo(**urlinfo)
+ result = s3.parse_fakes3_endpoint(url)
+ assert endpoint == result
+ m_urlparse.assert_called_with(url)
+
- assert s3.validate_bucket_name(module, "docexamplebucket1") is True
- assert not module.fail_json.called
- assert s3.validate_bucket_name(module, "log-delivery-march-2020") is True
- assert not module.fail_json.called
- assert s3.validate_bucket_name(module, "my-hosted-content") is True
- assert not module.fail_json.called
-
- assert s3.validate_bucket_name(module, "docexamplewebsite.com") is True
- assert not module.fail_json.called
- assert s3.validate_bucket_name(module, "www.docexamplewebsite.com") is True
- assert not module.fail_json.called
- assert s3.validate_bucket_name(module, "my.example.s3.bucket") is True
- assert not module.fail_json.called
- assert s3.validate_bucket_name(module, "doc") is True
- assert not module.fail_json.called
-
- module.fail_json.reset_mock()
- s3.validate_bucket_name(module, "doc_example_bucket")
- assert module.fail_json.called
-
- module.fail_json.reset_mock()
- s3.validate_bucket_name(module, "DocExampleBucket")
- assert module.fail_json.called
- module.fail_json.reset_mock()
- s3.validate_bucket_name(module, "doc-example-bucket-")
- assert module.fail_json.called
- s3.validate_bucket_name(module, "my")
- assert module.fail_json.called
+@pytest.mark.parametrize(
+ "url,scheme,use_ssl",
+ [
+ ("https://test-s3-ceph.amazon.com", "https", True),
+ ("http://test-s3-ceph.amazon.com", "http", False),
+ ],
+)
+@patch(mod_urlparse)
+def test_parse_ceph_endpoint(m_urlparse, url, scheme, use_ssl):
+ m_urlparse.return_value = UrlInfo(scheme=scheme)
+ result = s3.parse_ceph_endpoint(url)
+ assert result == {"endpoint": url, "use_ssl": use_ssl}
+ m_urlparse.assert_called_with(url)
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/test_tagging.py b/ansible_collections/amazon/aws/tests/unit/module_utils/test_tagging.py
index 04ec96eb0..edeb7dabd 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/test_tagging.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/test_tagging.py
@@ -3,44 +3,56 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+import pytest
from ansible_collections.amazon.aws.plugins.module_utils.tagging import ansible_dict_to_boto3_tag_list
+from ansible_collections.amazon.aws.plugins.module_utils.tagging import ansible_dict_to_tag_filter_dict
from ansible_collections.amazon.aws.plugins.module_utils.tagging import boto3_tag_list_to_ansible_dict
from ansible_collections.amazon.aws.plugins.module_utils.tagging import boto3_tag_specifications
from ansible_collections.amazon.aws.plugins.module_utils.tagging import compare_aws_tags
-class TestTagging():
-
+class TestTagging:
# ========================================================
# Setup some initial data that we can use within our tests
# ========================================================
def setup_method(self):
-
self.tag_example_boto3_list = [
- {'Key': 'lowerCamel', 'Value': 'lowerCamelValue'},
- {'Key': 'UpperCamel', 'Value': 'upperCamelValue'},
- {'Key': 'Normal case', 'Value': 'Normal Value'},
- {'Key': 'lower case', 'Value': 'lower case value'}
+ {"Key": "lowerCamel", "Value": "lowerCamelValue"},
+ {"Key": "UpperCamel", "Value": "upperCamelValue"},
+ {"Key": "Normal case", "Value": "Normal Value"},
+ {"Key": "lower case", "Value": "lower case value"},
+ ]
+
+ self.tag_example_boto3_list_custom_key = [
+ {"MyKey": "lowerCamel", "MyValue": "lowerCamelValue"},
+ {"MyKey": "UpperCamel", "MyValue": "upperCamelValue"},
+ {"MyKey": "Normal case", "MyValue": "Normal Value"},
+ {"MyKey": "lower case", "MyValue": "lower case value"},
]
self.tag_example_dict = {
- 'lowerCamel': 'lowerCamelValue',
- 'UpperCamel': 'upperCamelValue',
- 'Normal case': 'Normal Value',
- 'lower case': 'lower case value'
+ "lowerCamel": "lowerCamelValue",
+ "UpperCamel": "upperCamelValue",
+ "Normal case": "Normal Value",
+ "lower case": "lower case value",
+ }
+
+ self.tag_filter_dict = {
+ "tag:lowerCamel": "lowerCamelValue",
+ "tag:UpperCamel": "upperCamelValue",
+ "tag:Normal case": "Normal Value",
+ "tag:lower case": "lower case value",
}
self.tag_minimal_boto3_list = [
- {'Key': 'mykey', 'Value': 'myvalue'},
+ {"Key": "mykey", "Value": "myvalue"},
]
- self.tag_minimal_dict = {'mykey': 'myvalue'}
+ self.tag_minimal_dict = {"mykey": "myvalue"}
- self.tag_aws_dict = {'aws:cloudformation:stack-name': 'ExampleStack'}
- self.tag_aws_changed = {'aws:cloudformation:stack-name': 'AnotherStack'}
+ self.tag_aws_dict = {"aws:cloudformation:stack-name": "ExampleStack"}
+ self.tag_aws_changed = {"aws:cloudformation:stack-name": "AnotherStack"}
# ========================================================
# tagging.ansible_dict_to_boto3_tag_list
@@ -48,10 +60,22 @@ class TestTagging():
def test_ansible_dict_to_boto3_tag_list(self):
converted_list = ansible_dict_to_boto3_tag_list(self.tag_example_dict)
- sorted_converted_list = sorted(converted_list, key=lambda i: (i['Key']))
- sorted_list = sorted(self.tag_example_boto3_list, key=lambda i: (i['Key']))
+ sorted_converted_list = sorted(converted_list, key=lambda i: (i["Key"]))
+ sorted_list = sorted(self.tag_example_boto3_list, key=lambda i: (i["Key"]))
assert sorted_converted_list == sorted_list
+ def test_ansible_dict_to_boto3_tag_list_empty(self):
+ assert ansible_dict_to_boto3_tag_list({}) == []
+ assert ansible_dict_to_boto3_tag_list(None) == []
+
+ def test_ansible_dict_to_boto3_tag_list_boolean(self):
+ dict_with_bool = dict(boolean=True)
+ list_with_bool = [{"Key": "boolean", "Value": "True"}]
+ assert ansible_dict_to_boto3_tag_list(dict_with_bool) == list_with_bool
+ dict_with_bool = dict(boolean=False)
+ list_with_bool = [{"Key": "boolean", "Value": "False"}]
+ assert ansible_dict_to_boto3_tag_list(dict_with_bool) == list_with_bool
+
# ========================================================
# tagging.boto3_tag_list_to_ansible_dict
# ========================================================
@@ -66,6 +90,14 @@ class TestTagging():
# Minio returns [{}] when there are no tags
assert boto3_tag_list_to_ansible_dict([{}]) == {}
+ def test_boto3_tag_list_to_ansible_dict_nondefault_keys(self):
+ converted_dict = boto3_tag_list_to_ansible_dict(self.tag_example_boto3_list_custom_key, "MyKey", "MyValue")
+ assert converted_dict == self.tag_example_dict
+
+ with pytest.raises(ValueError) as context:
+ boto3_tag_list_to_ansible_dict(self.tag_example_boto3_list, "MyKey", "MyValue")
+ assert "Couldn't find tag key" in str(context.value)
+
# ========================================================
# tagging.compare_aws_tags
# ========================================================
@@ -84,21 +116,21 @@ class TestTagging():
def test_compare_aws_tags_removed(self):
new_dict = dict(self.tag_example_dict)
- del new_dict['lowerCamel']
- del new_dict['Normal case']
+ del new_dict["lowerCamel"]
+ del new_dict["Normal case"]
keys_to_set, keys_to_unset = compare_aws_tags(self.tag_example_dict, new_dict)
assert {} == keys_to_set
- assert set(['lowerCamel', 'Normal case']) == set(keys_to_unset)
+ assert set(["lowerCamel", "Normal case"]) == set(keys_to_unset)
keys_to_set, keys_to_unset = compare_aws_tags(self.tag_example_dict, new_dict, purge_tags=False)
assert {} == keys_to_set
assert [] == keys_to_unset
keys_to_set, keys_to_unset = compare_aws_tags(self.tag_example_dict, new_dict, purge_tags=True)
assert {} == keys_to_set
- assert set(['lowerCamel', 'Normal case']) == set(keys_to_unset)
+ assert set(["lowerCamel", "Normal case"]) == set(keys_to_unset)
def test_compare_aws_tags_added(self):
new_dict = dict(self.tag_example_dict)
- new_keys = {'add_me': 'lower case', 'Me too!': 'Contributing'}
+ new_keys = {"add_me": "lower case", "Me too!": "Contributing"}
new_dict.update(new_keys)
keys_to_set, keys_to_unset = compare_aws_tags(self.tag_example_dict, new_dict)
assert new_keys == keys_to_set
@@ -112,7 +144,7 @@ class TestTagging():
def test_compare_aws_tags_changed(self):
new_dict = dict(self.tag_example_dict)
- new_keys = {'UpperCamel': 'anotherCamelValue', 'Normal case': 'normal value'}
+ new_keys = {"UpperCamel": "anotherCamelValue", "Normal case": "normal value"}
new_dict.update(new_keys)
keys_to_set, keys_to_unset = compare_aws_tags(self.tag_example_dict, new_dict)
assert new_keys == keys_to_set
@@ -124,21 +156,35 @@ class TestTagging():
assert new_keys == keys_to_set
assert [] == keys_to_unset
+ def test_compare_aws_tags_boolean(self):
+ dict_with_bool = dict(boolean=True)
+ dict_with_text_bool = dict(boolean="True")
+ # AWS always returns tag values as strings, so we only test this way around
+ keys_to_set, keys_to_unset = compare_aws_tags(dict_with_text_bool, dict_with_bool)
+ assert {} == keys_to_set
+ assert [] == keys_to_unset
+ keys_to_set, keys_to_unset = compare_aws_tags(dict_with_text_bool, dict_with_bool, purge_tags=False)
+ assert {} == keys_to_set
+ assert [] == keys_to_unset
+ keys_to_set, keys_to_unset = compare_aws_tags(dict_with_text_bool, dict_with_bool, purge_tags=True)
+ assert {} == keys_to_set
+ assert [] == keys_to_unset
+
def test_compare_aws_tags_complex_update(self):
# Adds 'Me too!', Changes 'UpperCamel' and removes 'Normal case'
new_dict = dict(self.tag_example_dict)
- new_keys = {'UpperCamel': 'anotherCamelValue', 'Me too!': 'Contributing'}
+ new_keys = {"UpperCamel": "anotherCamelValue", "Me too!": "Contributing"}
new_dict.update(new_keys)
- del new_dict['Normal case']
+ del new_dict["Normal case"]
keys_to_set, keys_to_unset = compare_aws_tags(self.tag_example_dict, new_dict)
assert new_keys == keys_to_set
- assert ['Normal case'] == keys_to_unset
+ assert ["Normal case"] == keys_to_unset
keys_to_set, keys_to_unset = compare_aws_tags(self.tag_example_dict, new_dict, purge_tags=False)
assert new_keys == keys_to_set
assert [] == keys_to_unset
keys_to_set, keys_to_unset = compare_aws_tags(self.tag_example_dict, new_dict, purge_tags=True)
assert new_keys == keys_to_set
- assert ['Normal case'] == keys_to_unset
+ assert ["Normal case"] == keys_to_unset
def test_compare_aws_tags_aws(self):
starting_tags = dict(self.tag_aws_dict)
@@ -158,46 +204,62 @@ class TestTagging():
old_dict.update(self.tag_aws_dict)
# Adds 'Me too!', Changes 'UpperCamel' and removes 'Normal case'
new_dict = dict(self.tag_example_dict)
- new_keys = {'UpperCamel': 'anotherCamelValue', 'Me too!': 'Contributing'}
+ new_keys = {"UpperCamel": "anotherCamelValue", "Me too!": "Contributing"}
new_dict.update(new_keys)
- del new_dict['Normal case']
+ del new_dict["Normal case"]
keys_to_set, keys_to_unset = compare_aws_tags(old_dict, new_dict)
assert new_keys == keys_to_set
- assert ['Normal case'] == keys_to_unset
+ assert ["Normal case"] == keys_to_unset
keys_to_set, keys_to_unset = compare_aws_tags(old_dict, new_dict, purge_tags=False)
assert new_keys == keys_to_set
assert [] == keys_to_unset
keys_to_set, keys_to_unset = compare_aws_tags(old_dict, new_dict, purge_tags=True)
assert new_keys == keys_to_set
- assert ['Normal case'] == keys_to_unset
+ assert ["Normal case"] == keys_to_unset
# ========================================================
# tagging.boto3_tag_specifications
# ========================================================
+ def test_boto3_tag_specifications_empty(self):
+ assert boto3_tag_specifications(None) is None
+ assert boto3_tag_specifications({}) is None
+
# Builds upon ansible_dict_to_boto3_tag_list, assume that if a minimal tag
# dictionary behaves as expected, then all will behave
def test_boto3_tag_specifications_no_type(self):
tag_specification = boto3_tag_specifications(self.tag_minimal_dict)
- expected_specification = [{'Tags': self.tag_minimal_boto3_list}]
+ expected_specification = [{"Tags": self.tag_minimal_boto3_list}]
assert tag_specification == expected_specification
def test_boto3_tag_specifications_string_type(self):
- tag_specification = boto3_tag_specifications(self.tag_minimal_dict, 'instance')
- expected_specification = [{'ResourceType': 'instance', 'Tags': self.tag_minimal_boto3_list}]
+ tag_specification = boto3_tag_specifications(self.tag_minimal_dict, "instance")
+ expected_specification = [{"ResourceType": "instance", "Tags": self.tag_minimal_boto3_list}]
assert tag_specification == expected_specification
def test_boto3_tag_specifications_single_type(self):
- tag_specification = boto3_tag_specifications(self.tag_minimal_dict, ['instance'])
- expected_specification = [{'ResourceType': 'instance', 'Tags': self.tag_minimal_boto3_list}]
+ tag_specification = boto3_tag_specifications(self.tag_minimal_dict, ["instance"])
+ expected_specification = [{"ResourceType": "instance", "Tags": self.tag_minimal_boto3_list}]
assert tag_specification == expected_specification
def test_boto3_tag_specifications_multipe_types(self):
- tag_specification = boto3_tag_specifications(self.tag_minimal_dict, ['instance', 'volume'])
+ tag_specification = boto3_tag_specifications(self.tag_minimal_dict, ["instance", "volume"])
expected_specification = [
- {'ResourceType': 'instance', 'Tags': self.tag_minimal_boto3_list},
- {'ResourceType': 'volume', 'Tags': self.tag_minimal_boto3_list},
+ {"ResourceType": "instance", "Tags": self.tag_minimal_boto3_list},
+ {"ResourceType": "volume", "Tags": self.tag_minimal_boto3_list},
]
- sorted_tag_spec = sorted(tag_specification, key=lambda i: (i['ResourceType']))
- sorted_expected = sorted(expected_specification, key=lambda i: (i['ResourceType']))
+ sorted_tag_spec = sorted(tag_specification, key=lambda i: (i["ResourceType"]))
+ sorted_expected = sorted(expected_specification, key=lambda i: (i["ResourceType"]))
assert sorted_tag_spec == sorted_expected
+
+ def test_ansible_dict_to_tag_filter_dict_empty(self):
+ assert ansible_dict_to_tag_filter_dict(None) == {}
+ assert ansible_dict_to_tag_filter_dict({}) == {}
+
+ def test_ansible_dict_to_tag_filter_dict_example(self):
+ assert ansible_dict_to_tag_filter_dict(self.tag_example_dict) == self.tag_filter_dict
+
+ def test_ansible_dict_to_tag_filter_dict_boolean(self):
+ dict_with_bool = {"boolean": True}
+ filter_dict_with_bool = {"tag:boolean": "True"}
+ assert ansible_dict_to_tag_filter_dict(dict_with_bool) == filter_dict_with_bool
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/test_tower.py b/ansible_collections/amazon/aws/tests/unit/module_utils/test_tower.py
index 9e1d90213..181caae9e 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/test_tower.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/test_tower.py
@@ -3,29 +3,26 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-# import pytest
-
import ansible_collections.amazon.aws.plugins.module_utils.tower as utils_tower
-WINDOWS_DOWNLOAD = "Invoke-Expression ((New-Object System.Net.Webclient).DownloadString(" \
+WINDOWS_DOWNLOAD = (
+ "Invoke-Expression ((New-Object System.Net.Webclient).DownloadString("
"'https://raw.githubusercontent.com/ansible/ansible/devel/examples/scripts/ConfigureRemotingForAnsible.ps1'))"
-EXAMPLE_PASSWORD = 'MY_EXAMPLE_PASSWORD'
+)
+EXAMPLE_PASSWORD = "MY_EXAMPLE_PASSWORD"
WINDOWS_INVOKE = "$admin.PSBase.Invoke('SetPassword', 'MY_EXAMPLE_PASSWORD'"
EXAMPLE_TOWER = "tower.example.com"
-EXAMPLE_TEMPLATE = 'My Template'
-EXAMPLE_KEY = '123EXAMPLE123'
-LINUX_TRIGGER_V1 = 'https://tower.example.com/api/v1/job_templates/My%20Template/callback/'
-LINUX_TRIGGER_V2 = 'https://tower.example.com/api/v2/job_templates/My%20Template/callback/'
+EXAMPLE_TEMPLATE = "My Template"
+EXAMPLE_KEY = "123EXAMPLE123"
+LINUX_TRIGGER_V1 = "https://tower.example.com/api/v1/job_templates/My%20Template/callback/"
+LINUX_TRIGGER_V2 = "https://tower.example.com/api/v2/job_templates/My%20Template/callback/"
def test_windows_callback_no_password():
user_data = utils_tower._windows_callback_script()
assert WINDOWS_DOWNLOAD in user_data
- assert 'SetPassword' not in user_data
+ assert "SetPassword" not in user_data
def test_windows_callback_password():
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/__init__.py b/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_ansible_dict_to_boto3_filter_list.py b/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_ansible_dict_to_boto3_filter_list.py
index 23c82b173..1fd6c6267 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_ansible_dict_to_boto3_filter_list.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_ansible_dict_to_boto3_filter_list.py
@@ -3,27 +3,22 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-
-__metaclass__ = type
-
from ansible_collections.amazon.aws.plugins.module_utils.transformation import ansible_dict_to_boto3_filter_list
-class TestAnsibleDictToBoto3FilterList():
-
+class TestAnsibleDictToBoto3FilterList:
# ========================================================
# ec2.ansible_dict_to_boto3_filter_list
# ========================================================
def test_ansible_dict_with_string_to_boto3_filter_list(self):
- filters = {'some-aws-id': 'i-01234567'}
+ filters = {"some-aws-id": "i-01234567"}
filter_list_string = [
{
- 'Name': 'some-aws-id',
- 'Values': [
- 'i-01234567',
- ]
+ "Name": "some-aws-id",
+ "Values": [
+ "i-01234567",
+ ],
}
]
@@ -31,13 +26,13 @@ class TestAnsibleDictToBoto3FilterList():
assert converted_filters_list == filter_list_string
def test_ansible_dict_with_boolean_to_boto3_filter_list(self):
- filters = {'enabled': True}
+ filters = {"enabled": True}
filter_list_boolean = [
{
- 'Name': 'enabled',
- 'Values': [
- 'true',
- ]
+ "Name": "enabled",
+ "Values": [
+ "true",
+ ],
}
]
@@ -45,13 +40,13 @@ class TestAnsibleDictToBoto3FilterList():
assert converted_filters_bool == filter_list_boolean
def test_ansible_dict_with_integer_to_boto3_filter_list(self):
- filters = {'version': 1}
+ filters = {"version": 1}
filter_list_integer = [
{
- 'Name': 'version',
- 'Values': [
- '1',
- ]
+ "Name": "version",
+ "Values": [
+ "1",
+ ],
}
]
@@ -59,15 +54,8 @@ class TestAnsibleDictToBoto3FilterList():
assert converted_filters_int == filter_list_integer
def test_ansible_dict_with_list_to_boto3_filter_list(self):
- filters = {'version': ['1', '2', '3']}
- filter_list_integer = [
- {
- 'Name': 'version',
- 'Values': [
- '1', '2', '3'
- ]
- }
- ]
+ filters = {"version": ["1", "2", "3"]}
+ filter_list_integer = [{"Name": "version", "Values": ["1", "2", "3"]}]
converted_filters_int = ansible_dict_to_boto3_filter_list(filters)
assert converted_filters_int == filter_list_integer
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_map_complex_type.py b/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_map_complex_type.py
index 2300e2351..3842491c0 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_map_complex_type.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_map_complex_type.py
@@ -3,18 +3,15 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from unittest.mock import sentinel
from ansible_collections.amazon.aws.plugins.module_utils.transformation import map_complex_type
-from ansible_collections.amazon.aws.tests.unit.compat.mock import sentinel
-
def test_map_complex_type_over_dict():
- type_map = {'minimum_healthy_percent': 'int', 'maximum_percent': 'int'}
- complex_type_dict = {'minimum_healthy_percent': "75", 'maximum_percent': "150"}
- complex_type_expected = {'minimum_healthy_percent': 75, 'maximum_percent': 150}
+ type_map = {"minimum_healthy_percent": "int", "maximum_percent": "int"}
+ complex_type_dict = {"minimum_healthy_percent": "75", "maximum_percent": "150"}
+ complex_type_expected = {"minimum_healthy_percent": 75, "maximum_percent": 150}
complex_type_mapped = map_complex_type(complex_type_dict, type_map)
@@ -22,79 +19,79 @@ def test_map_complex_type_over_dict():
def test_map_complex_type_empty():
- type_map = {'minimum_healthy_percent': 'int', 'maximum_percent': 'int'}
+ type_map = {"minimum_healthy_percent": "int", "maximum_percent": "int"}
assert map_complex_type({}, type_map) == {}
assert map_complex_type([], type_map) == []
assert map_complex_type(None, type_map) is None
def test_map_complex_type_no_type():
- type_map = {'some_entry': 'int'}
- complex_dict = {'another_entry': sentinel.UNSPECIFIED_MAPPING}
+ type_map = {"some_entry": "int"}
+ complex_dict = {"another_entry": sentinel.UNSPECIFIED_MAPPING}
mapped_dict = map_complex_type(complex_dict, type_map)
assert mapped_dict == complex_dict
# we should have the original sentinel object, even if it's a new dictionary
- assert mapped_dict['another_entry'] is sentinel.UNSPECIFIED_MAPPING
+ assert mapped_dict["another_entry"] is sentinel.UNSPECIFIED_MAPPING
def test_map_complex_type_list():
- type_map = {'some_entry': 'int'}
- complex_dict = {'some_entry': ["1", "2", "3"]}
- expected_dict = {'some_entry': [1, 2, 3]}
+ type_map = {"some_entry": "int"}
+ complex_dict = {"some_entry": ["1", "2", "3"]}
+ expected_dict = {"some_entry": [1, 2, 3]}
mapped_dict = map_complex_type(complex_dict, type_map)
assert mapped_dict == expected_dict
def test_map_complex_type_list_type():
- type_map = {'some_entry': ['int']}
- complex_dict = {'some_entry': ["1", "2", "3"]}
- expected_dict = {'some_entry': [1, 2, 3]}
+ type_map = {"some_entry": ["int"]}
+ complex_dict = {"some_entry": ["1", "2", "3"]}
+ expected_dict = {"some_entry": [1, 2, 3]}
mapped_dict = map_complex_type(complex_dict, type_map)
assert mapped_dict == expected_dict
- type_map = {'some_entry': ['int']}
- complex_dict = {'some_entry': "1"}
- expected_dict = {'some_entry': 1}
+ type_map = {"some_entry": ["int"]}
+ complex_dict = {"some_entry": "1"}
+ expected_dict = {"some_entry": 1}
mapped_dict = map_complex_type(complex_dict, type_map)
assert mapped_dict == expected_dict
def test_map_complex_type_complex():
type_map = {
- 'my_integer': 'int',
- 'my_bool': 'bool',
- 'my_string': 'str',
- 'my_typelist_of_int': ['int'],
- 'my_maplist_of_int': 'int',
- 'my_unused': 'bool',
+ "my_integer": "int",
+ "my_bool": "bool",
+ "my_string": "str",
+ "my_typelist_of_int": ["int"],
+ "my_maplist_of_int": "int",
+ "my_unused": "bool",
}
complex_dict = {
- 'my_integer': '-24',
- 'my_bool': 'true',
- 'my_string': 43,
- 'my_typelist_of_int': '5',
- 'my_maplist_of_int': ['-26', '47'],
- 'my_unconverted': sentinel.UNSPECIFIED_MAPPING,
+ "my_integer": "-24",
+ "my_bool": "true",
+ "my_string": 43,
+ "my_typelist_of_int": "5",
+ "my_maplist_of_int": ["-26", "47"],
+ "my_unconverted": sentinel.UNSPECIFIED_MAPPING,
}
expected_dict = {
- 'my_integer': -24,
- 'my_bool': True,
- 'my_string': '43',
- 'my_typelist_of_int': 5,
- 'my_maplist_of_int': [-26, 47],
- 'my_unconverted': sentinel.UNSPECIFIED_MAPPING,
+ "my_integer": -24,
+ "my_bool": True,
+ "my_string": "43",
+ "my_typelist_of_int": 5,
+ "my_maplist_of_int": [-26, 47],
+ "my_unconverted": sentinel.UNSPECIFIED_MAPPING,
}
mapped_dict = map_complex_type(complex_dict, type_map)
assert mapped_dict == expected_dict
- assert mapped_dict['my_unconverted'] is sentinel.UNSPECIFIED_MAPPING
- assert mapped_dict['my_bool'] is True
+ assert mapped_dict["my_unconverted"] is sentinel.UNSPECIFIED_MAPPING
+ assert mapped_dict["my_bool"] is True
def test_map_complex_type_nested_list():
- type_map = {'my_integer': 'int'}
- complex_dict = [{'my_integer': '5'}, {'my_integer': '-24'}]
- expected_dict = [{'my_integer': 5}, {'my_integer': -24}]
+ type_map = {"my_integer": "int"}
+ complex_dict = [{"my_integer": "5"}, {"my_integer": "-24"}]
+ expected_dict = [{"my_integer": 5}, {"my_integer": -24}]
mapped_dict = map_complex_type(complex_dict, type_map)
assert mapped_dict == expected_dict
diff --git a/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_scrub_none_parameters.py b/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_scrub_none_parameters.py
index 82fd41ed3..6d87b2618 100644
--- a/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_scrub_none_parameters.py
+++ b/ansible_collections/amazon/aws/tests/unit/module_utils/transformation/test_scrub_none_parameters.py
@@ -1,83 +1,115 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import pytest
from ansible_collections.amazon.aws.plugins.module_utils.transformation import scrub_none_parameters
scrub_none_test_data = [
- (dict(), # Input
- dict(), # Output with descend_into_lists=False
- dict(), # Output with descend_into_lists=True
- ),
- (dict(param1=None, param2=None),
- dict(),
- dict(),
- ),
- (dict(param1='something'),
- dict(param1='something'),
- dict(param1='something'),
- ),
- (dict(param1=False),
- dict(param1=False),
- dict(param1=False),
- ),
- (dict(param1=None, param2=[]),
- dict(param2=[]),
- dict(param2=[]),
- ),
- (dict(param1=None, param2=["list_value"]),
- dict(param2=["list_value"]),
- dict(param2=["list_value"]),
- ),
- (dict(param1='something', param2='something_else'),
- dict(param1='something', param2='something_else'),
- dict(param1='something', param2='something_else'),
- ),
- (dict(param1='something', param2=dict()),
- dict(param1='something', param2=dict()),
- dict(param1='something', param2=dict()),
- ),
- (dict(param1='something', param2=None),
- dict(param1='something'),
- dict(param1='something'),
- ),
- (dict(param1='something', param2=None, param3=None),
- dict(param1='something'),
- dict(param1='something'),
- ),
- (dict(param1='something', param2=None, param3=None, param4='something_else'),
- dict(param1='something', param4='something_else'),
- dict(param1='something', param4='something_else'),
- ),
- (dict(param1=dict(sub_param1='something', sub_param2=dict(sub_sub_param1='another_thing')), param2=None, param3=None, param4='something_else'),
- dict(param1=dict(sub_param1='something', sub_param2=dict(sub_sub_param1='another_thing')), param4='something_else'),
- dict(param1=dict(sub_param1='something', sub_param2=dict(sub_sub_param1='another_thing')), param4='something_else'),
- ),
- (dict(param1=dict(sub_param1='something', sub_param2=dict()), param2=None, param3=None, param4='something_else'),
- dict(param1=dict(sub_param1='something', sub_param2=dict()), param4='something_else'),
- dict(param1=dict(sub_param1='something', sub_param2=dict()), param4='something_else'),
- ),
- (dict(param1=dict(sub_param1='something', sub_param2=False), param2=None, param3=None, param4='something_else'),
- dict(param1=dict(sub_param1='something', sub_param2=False), param4='something_else'),
- dict(param1=dict(sub_param1='something', sub_param2=False), param4='something_else'),
- ),
- (dict(param1=[dict(sub_param1='my_dict_nested_in_a_list_1', sub_param2='my_dict_nested_in_a_list_2')], param2=[]),
- dict(param1=[dict(sub_param1='my_dict_nested_in_a_list_1', sub_param2='my_dict_nested_in_a_list_2')], param2=[]),
- dict(param1=[dict(sub_param1='my_dict_nested_in_a_list_1', sub_param2='my_dict_nested_in_a_list_2')], param2=[]),
- ),
- (dict(param1=[dict(sub_param1='my_dict_nested_in_a_list_1', sub_param2=None)], param2=[]),
- dict(param1=[dict(sub_param1='my_dict_nested_in_a_list_1', sub_param2=None)], param2=[]),
- dict(param1=[dict(sub_param1='my_dict_nested_in_a_list_1')], param2=[]),
- ),
- (dict(param1=[dict(sub_param1=[dict(sub_sub_param1=None)], sub_param2=None)], param2=[]),
- dict(param1=[dict(sub_param1=[dict(sub_sub_param1=None)], sub_param2=None)], param2=[]),
- dict(param1=[dict(sub_param1=[dict()])], param2=[]),
- ),
- (dict(param1=[dict(sub_param1=[dict(sub_sub_param1=None)], sub_param2=None)], param2=None),
- dict(param1=[dict(sub_param1=[dict(sub_sub_param1=None)], sub_param2=None)]),
- dict(param1=[dict(sub_param1=[dict()])]),
- ),
+ (
+ dict(), # Input
+ dict(), # Output with descend_into_lists=False
+ dict(), # Output with descend_into_lists=True
+ ),
+ (
+ dict(param1=None, param2=None),
+ dict(),
+ dict(),
+ ),
+ (
+ dict(param1="something"),
+ dict(param1="something"),
+ dict(param1="something"),
+ ),
+ (
+ dict(param1=False),
+ dict(param1=False),
+ dict(param1=False),
+ ),
+ (
+ dict(param1=None, param2=[]),
+ dict(param2=[]),
+ dict(param2=[]),
+ ),
+ (
+ dict(param1=None, param2=["list_value"]),
+ dict(param2=["list_value"]),
+ dict(param2=["list_value"]),
+ ),
+ (
+ dict(param1="something", param2="something_else"),
+ dict(param1="something", param2="something_else"),
+ dict(param1="something", param2="something_else"),
+ ),
+ (
+ dict(param1="something", param2=dict()),
+ dict(param1="something", param2=dict()),
+ dict(param1="something", param2=dict()),
+ ),
+ (
+ dict(param1="something", param2=None),
+ dict(param1="something"),
+ dict(param1="something"),
+ ),
+ (
+ dict(param1="something", param2=None, param3=None),
+ dict(param1="something"),
+ dict(param1="something"),
+ ),
+ (
+ dict(param1="something", param2=None, param3=None, param4="something_else"),
+ dict(param1="something", param4="something_else"),
+ dict(param1="something", param4="something_else"),
+ ),
+ (
+ dict(
+ param1=dict(sub_param1="something", sub_param2=dict(sub_sub_param1="another_thing")),
+ param2=None,
+ param3=None,
+ param4="something_else",
+ ),
+ dict(
+ param1=dict(sub_param1="something", sub_param2=dict(sub_sub_param1="another_thing")),
+ param4="something_else",
+ ),
+ dict(
+ param1=dict(sub_param1="something", sub_param2=dict(sub_sub_param1="another_thing")),
+ param4="something_else",
+ ),
+ ),
+ (
+ dict(param1=dict(sub_param1="something", sub_param2=dict()), param2=None, param3=None, param4="something_else"),
+ dict(param1=dict(sub_param1="something", sub_param2=dict()), param4="something_else"),
+ dict(param1=dict(sub_param1="something", sub_param2=dict()), param4="something_else"),
+ ),
+ (
+ dict(param1=dict(sub_param1="something", sub_param2=False), param2=None, param3=None, param4="something_else"),
+ dict(param1=dict(sub_param1="something", sub_param2=False), param4="something_else"),
+ dict(param1=dict(sub_param1="something", sub_param2=False), param4="something_else"),
+ ),
+ (
+ dict(
+ param1=[dict(sub_param1="my_dict_nested_in_a_list_1", sub_param2="my_dict_nested_in_a_list_2")], param2=[]
+ ),
+ dict(
+ param1=[dict(sub_param1="my_dict_nested_in_a_list_1", sub_param2="my_dict_nested_in_a_list_2")], param2=[]
+ ),
+ dict(
+ param1=[dict(sub_param1="my_dict_nested_in_a_list_1", sub_param2="my_dict_nested_in_a_list_2")], param2=[]
+ ),
+ ),
+ (
+ dict(param1=[dict(sub_param1="my_dict_nested_in_a_list_1", sub_param2=None)], param2=[]),
+ dict(param1=[dict(sub_param1="my_dict_nested_in_a_list_1", sub_param2=None)], param2=[]),
+ dict(param1=[dict(sub_param1="my_dict_nested_in_a_list_1")], param2=[]),
+ ),
+ (
+ dict(param1=[dict(sub_param1=[dict(sub_sub_param1=None)], sub_param2=None)], param2=[]),
+ dict(param1=[dict(sub_param1=[dict(sub_sub_param1=None)], sub_param2=None)], param2=[]),
+ dict(param1=[dict(sub_param1=[dict()])], param2=[]),
+ ),
+ (
+ dict(param1=[dict(sub_param1=[dict(sub_sub_param1=None)], sub_param2=None)], param2=None),
+ dict(param1=[dict(sub_param1=[dict(sub_sub_param1=None)], sub_param2=None)]),
+ dict(param1=[dict(sub_param1=[dict()])]),
+ ),
]
diff --git a/ansible_collections/amazon/aws/tests/unit/plugin_utils/__init__.py b/ansible_collections/amazon/aws/tests/unit/plugin_utils/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugin_utils/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/plugin_utils/base/__init__.py b/ansible_collections/amazon/aws/tests/unit/plugin_utils/base/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugin_utils/base/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/plugin_utils/base/test_plugin.py b/ansible_collections/amazon/aws/tests/unit/plugin_utils/base/test_plugin.py
new file mode 100644
index 000000000..f374934fd
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugin_utils/base/test_plugin.py
@@ -0,0 +1,177 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+import warnings
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import sentinel
+
+import pytest
+
+from ansible.errors import AnsibleError
+
+import ansible_collections.amazon.aws.plugins.plugin_utils.base as utils_base
+
+
+def test_debug(monkeypatch):
+ monkeypatch.setattr(utils_base.display, "debug", warnings.warn)
+ base_plugin = utils_base.AWSPluginBase()
+
+ with pytest.warns(UserWarning, match="My debug message"):
+ base_plugin.debug("My debug message")
+
+
+def test_warn(monkeypatch):
+ monkeypatch.setattr(utils_base.display, "warning", warnings.warn)
+ base_plugin = utils_base.AWSPluginBase()
+
+ with pytest.warns(UserWarning, match="My warning message"):
+ base_plugin.warn("My warning message")
+
+
+def test_do_fail():
+ base_plugin = utils_base.AWSPluginBase()
+
+ with pytest.raises(AnsibleError, match="My exception message"):
+ base_plugin._do_fail("My exception message")
+
+
+def test_fail_aws():
+ base_plugin = utils_base.AWSPluginBase()
+ example_exception = Exception("My example exception")
+ example_message = "My example failure message"
+
+ with pytest.raises(AnsibleError, match="My example failure message"):
+ base_plugin.fail_aws(example_message)
+
+ with pytest.raises(AnsibleError, match="My example failure message"):
+ base_plugin.fail_aws(message=example_message)
+
+ # As long as example_example_exception is supported by to_native, we're good.
+ with pytest.raises(AnsibleError, match="My example exception"):
+ base_plugin.fail_aws(example_exception)
+
+ with pytest.raises(AnsibleError, match="My example failure message: My example exception"):
+ base_plugin.fail_aws(example_message, example_exception)
+
+ with pytest.raises(AnsibleError, match="My example failure message: My example exception"):
+ base_plugin.fail_aws(message=example_message, exception=example_exception)
+
+
+def test_region(monkeypatch):
+ get_aws_region = MagicMock(name="get_aws_region")
+ get_aws_region.return_value = sentinel.RETURNED_REGION
+ monkeypatch.setattr(utils_base, "get_aws_region", get_aws_region)
+ base_plugin = utils_base.AWSPluginBase()
+
+ assert base_plugin.region is sentinel.RETURNED_REGION
+ assert get_aws_region.call_args == call(base_plugin)
+
+
+def test_require_aws_sdk(monkeypatch):
+ require_sdk = MagicMock(name="check_sdk_version_supported")
+ require_sdk.return_value = sentinel.RETURNED_SDK
+ monkeypatch.setattr(utils_base, "check_sdk_version_supported", require_sdk)
+
+ base_plugin = utils_base.AWSPluginBase()
+ assert base_plugin.require_aws_sdk() is sentinel.RETURNED_SDK
+ assert require_sdk.call_args == call(botocore_version=None, boto3_version=None, warn=base_plugin.warn)
+
+ base_plugin = utils_base.AWSPluginBase()
+ assert (
+ base_plugin.require_aws_sdk(botocore_version=sentinel.PARAM_BOTOCORE, boto3_version=sentinel.PARAM_BOTO3)
+ is sentinel.RETURNED_SDK
+ )
+ assert require_sdk.call_args == call(
+ botocore_version=sentinel.PARAM_BOTOCORE, boto3_version=sentinel.PARAM_BOTO3, warn=base_plugin.warn
+ )
+
+
+def test_client_no_wrapper(monkeypatch):
+ get_aws_connection_info = MagicMock(name="get_aws_connection_info")
+ sentinel.CONN_ARGS = dict()
+ get_aws_connection_info.return_value = (sentinel.CONN_REGION, sentinel.CONN_URL, sentinel.CONN_ARGS)
+ monkeypatch.setattr(utils_base, "get_aws_connection_info", get_aws_connection_info)
+ boto3_conn = MagicMock(name="boto3_conn")
+ boto3_conn.return_value = sentinel.BOTO3_CONN
+ monkeypatch.setattr(utils_base, "boto3_conn", boto3_conn)
+
+ base_plugin = utils_base.AWSPluginBase()
+ assert base_plugin.client(sentinel.PARAM_SERVICE) is sentinel.BOTO3_CONN
+ assert get_aws_connection_info.call_args == call(base_plugin)
+ assert boto3_conn.call_args == call(
+ base_plugin,
+ conn_type="client",
+ resource=sentinel.PARAM_SERVICE,
+ region=sentinel.CONN_REGION,
+ endpoint=sentinel.CONN_URL,
+ )
+
+
+def test_client_wrapper(monkeypatch):
+ get_aws_connection_info = MagicMock(name="get_aws_connection_info")
+ sentinel.CONN_ARGS = dict()
+ get_aws_connection_info.return_value = (sentinel.CONN_REGION, sentinel.CONN_URL, sentinel.CONN_ARGS)
+ monkeypatch.setattr(utils_base, "get_aws_connection_info", get_aws_connection_info)
+ boto3_conn = MagicMock(name="boto3_conn")
+ boto3_conn.return_value = sentinel.BOTO3_CONN
+ monkeypatch.setattr(utils_base, "boto3_conn", boto3_conn)
+
+ base_plugin = utils_base.AWSPluginBase()
+ wrapped_conn = base_plugin.client(sentinel.PARAM_SERVICE, sentinel.PARAM_WRAPPER)
+ assert wrapped_conn.client is sentinel.BOTO3_CONN
+ assert wrapped_conn.retry is sentinel.PARAM_WRAPPER
+ assert get_aws_connection_info.call_args == call(base_plugin)
+ assert boto3_conn.call_args == call(
+ base_plugin,
+ conn_type="client",
+ resource=sentinel.PARAM_SERVICE,
+ region=sentinel.CONN_REGION,
+ endpoint=sentinel.CONN_URL,
+ )
+
+ # Check that we can override parameters
+ wrapped_conn = base_plugin.client(sentinel.PARAM_SERVICE, sentinel.PARAM_WRAPPER, region=sentinel.PARAM_REGION)
+ assert wrapped_conn.client is sentinel.BOTO3_CONN
+ assert wrapped_conn.retry is sentinel.PARAM_WRAPPER
+ assert get_aws_connection_info.call_args == call(base_plugin)
+ assert boto3_conn.call_args == call(
+ base_plugin,
+ conn_type="client",
+ resource=sentinel.PARAM_SERVICE,
+ region=sentinel.PARAM_REGION,
+ endpoint=sentinel.CONN_URL,
+ )
+
+
+def test_resource(monkeypatch):
+ get_aws_connection_info = MagicMock(name="get_aws_connection_info")
+ sentinel.CONN_ARGS = dict()
+ get_aws_connection_info.return_value = (sentinel.CONN_REGION, sentinel.CONN_URL, sentinel.CONN_ARGS)
+ monkeypatch.setattr(utils_base, "get_aws_connection_info", get_aws_connection_info)
+ boto3_conn = MagicMock(name="boto3_conn")
+ boto3_conn.return_value = sentinel.BOTO3_CONN
+ monkeypatch.setattr(utils_base, "boto3_conn", boto3_conn)
+
+ base_plugin = utils_base.AWSPluginBase()
+ assert base_plugin.resource(sentinel.PARAM_SERVICE) is sentinel.BOTO3_CONN
+ assert get_aws_connection_info.call_args == call(base_plugin)
+ assert boto3_conn.call_args == call(
+ base_plugin,
+ conn_type="resource",
+ resource=sentinel.PARAM_SERVICE,
+ region=sentinel.CONN_REGION,
+ endpoint=sentinel.CONN_URL,
+ )
+
+ assert base_plugin.resource(sentinel.PARAM_SERVICE, region=sentinel.PARAM_REGION) is sentinel.BOTO3_CONN
+ assert get_aws_connection_info.call_args == call(base_plugin)
+ assert boto3_conn.call_args == call(
+ base_plugin,
+ conn_type="resource",
+ resource=sentinel.PARAM_SERVICE,
+ region=sentinel.PARAM_REGION,
+ endpoint=sentinel.CONN_URL,
+ )
diff --git a/ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/__init__.py b/ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/test_boto3_conn_plugin.py b/ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/test_boto3_conn_plugin.py
new file mode 100644
index 000000000..766257d3b
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/test_boto3_conn_plugin.py
@@ -0,0 +1,131 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+try:
+ import botocore
+except ImportError:
+ pass
+
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import sentinel
+
+import pytest
+
+import ansible_collections.amazon.aws.plugins.plugin_utils.botocore as utils_botocore
+
+
+class FailException(Exception):
+ pass
+
+
+@pytest.fixture
+def aws_plugin(monkeypatch):
+ aws_plugin = MagicMock()
+ aws_plugin.fail_aws.side_effect = FailException()
+ monkeypatch.setattr(aws_plugin, "ansible_name", sentinel.PLUGIN_NAME)
+ return aws_plugin
+
+
+@pytest.fixture
+def botocore_utils(monkeypatch):
+ return utils_botocore
+
+
+###############################################################
+# module_utils.botocore.boto3_conn
+###############################################################
+def test_boto3_conn_success_plugin(monkeypatch, aws_plugin, botocore_utils):
+ connection_method = MagicMock(name="_boto3_conn")
+ monkeypatch.setattr(botocore_utils, "_boto3_conn", connection_method)
+ connection_method.return_value = sentinel.RETURNED_CONNECTION
+
+ assert botocore_utils.boto3_conn(aws_plugin) is sentinel.RETURNED_CONNECTION
+ passed_args = connection_method.call_args
+ assert passed_args == call(conn_type=None, resource=None, region=None, endpoint=None)
+
+ result = botocore_utils.boto3_conn(
+ aws_plugin,
+ conn_type=sentinel.PARAM_CONNTYPE,
+ resource=sentinel.PARAM_RESOURCE,
+ region=sentinel.PARAM_REGION,
+ endpoint=sentinel.PARAM_ENDPOINT,
+ extra_arg=sentinel.PARAM_EXTRA,
+ )
+ assert result is sentinel.RETURNED_CONNECTION
+ passed_args = connection_method.call_args
+ assert passed_args == call(
+ conn_type=sentinel.PARAM_CONNTYPE,
+ resource=sentinel.PARAM_RESOURCE,
+ region=sentinel.PARAM_REGION,
+ endpoint=sentinel.PARAM_ENDPOINT,
+ extra_arg=sentinel.PARAM_EXTRA,
+ )
+
+
+@pytest.mark.parametrize(
+ "failure, custom_error",
+ [
+ (ValueError(sentinel.VALUE_ERROR), "Couldn't connect to AWS: sentinel.VALUE_ERROR"),
+ (botocore.exceptions.ProfileNotFound(profile=sentinel.PROFILE_ERROR), None),
+ (
+ botocore.exceptions.PartialCredentialsError(
+ provider=sentinel.CRED_ERROR_PROV, cred_var=sentinel.CRED_ERROR_VAR
+ ),
+ None,
+ ),
+ (botocore.exceptions.NoCredentialsError(), None),
+ (botocore.exceptions.ConfigParseError(path=sentinel.PARSE_ERROR), None),
+ (botocore.exceptions.NoRegionError(), "The sentinel.PLUGIN_NAME plugin requires a region"),
+ ],
+)
+def test_boto3_conn_exception_plugin(monkeypatch, aws_plugin, botocore_utils, failure, custom_error):
+ connection_method = MagicMock(name="_boto3_conn")
+ monkeypatch.setattr(botocore_utils, "_boto3_conn", connection_method)
+ connection_method.side_effect = failure
+
+ if custom_error is None:
+ custom_error = str(failure)
+
+ with pytest.raises(FailException):
+ botocore_utils.boto3_conn(aws_plugin)
+
+ fail_args = aws_plugin.fail_aws.call_args
+ assert custom_error in fail_args[0][0]
+
+
+@pytest.mark.parametrize(
+ "failure, custom_error",
+ [
+ (ValueError(sentinel.VALUE_ERROR), "Couldn't connect to AWS: sentinel.VALUE_ERROR"),
+ (botocore.exceptions.ProfileNotFound(profile=sentinel.PROFILE_ERROR), None),
+ (
+ botocore.exceptions.PartialCredentialsError(
+ provider=sentinel.CRED_ERROR_PROV, cred_var=sentinel.CRED_ERROR_VAR
+ ),
+ None,
+ ),
+ (botocore.exceptions.NoCredentialsError(), None),
+ (botocore.exceptions.ConfigParseError(path=sentinel.PARSE_ERROR), None),
+ (
+ botocore.exceptions.NoRegionError(),
+ "A region is required and none was found",
+ ),
+ ],
+)
+def test_boto3_conn_exception_no_plugin_name(monkeypatch, aws_plugin, botocore_utils, failure, custom_error):
+ connection_method = MagicMock(name="_boto3_conn")
+ monkeypatch.setattr(botocore_utils, "_boto3_conn", connection_method)
+ connection_method.side_effect = failure
+ del aws_plugin.ansible_name
+
+ if custom_error is None:
+ custom_error = str(failure)
+
+ with pytest.raises(FailException):
+ botocore_utils.boto3_conn(aws_plugin)
+
+ fail_args = aws_plugin.fail_aws.call_args
+ assert custom_error in fail_args[0][0]
diff --git a/ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/test_get_aws_region.py b/ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/test_get_aws_region.py
new file mode 100644
index 000000000..e3f18282e
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/test_get_aws_region.py
@@ -0,0 +1,84 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import sentinel
+
+import pytest
+
+import ansible_collections.amazon.aws.plugins.plugin_utils.botocore as utils_botocore
+from ansible_collections.amazon.aws.plugins.module_utils.exceptions import AnsibleBotocoreError
+
+
+class FailException(Exception):
+ pass
+
+
+@pytest.fixture
+def aws_plugin(monkeypatch):
+ aws_plugin = MagicMock()
+ aws_plugin.fail_aws.side_effect = FailException()
+ aws_plugin.get_options.return_value = sentinel.PLUGIN_OPTIONS
+
+ return aws_plugin
+
+
+@pytest.fixture
+def botocore_utils(monkeypatch):
+ return utils_botocore
+
+
+###############################################################
+# module_utils.botocore.get_aws_region
+###############################################################
+def test_get_aws_region_simple_plugin(monkeypatch, aws_plugin, botocore_utils):
+ region_method = MagicMock(name="_aws_region")
+ monkeypatch.setattr(botocore_utils, "_aws_region", region_method)
+ region_method.return_value = sentinel.RETURNED_REGION
+
+ assert botocore_utils.get_aws_region(aws_plugin) is sentinel.RETURNED_REGION
+ passed_args = region_method.call_args
+ assert passed_args == call(sentinel.PLUGIN_OPTIONS)
+ # args[0]
+ assert passed_args[0][0] is sentinel.PLUGIN_OPTIONS
+
+
+def test_get_aws_region_exception_nested_plugin(monkeypatch, aws_plugin, botocore_utils):
+ region_method = MagicMock(name="_aws_region")
+ monkeypatch.setattr(botocore_utils, "_aws_region", region_method)
+
+ exception_nested = AnsibleBotocoreError(message=sentinel.ERROR_MSG, exception=sentinel.ERROR_EX)
+ region_method.side_effect = exception_nested
+
+ with pytest.raises(FailException):
+ assert botocore_utils.get_aws_region(aws_plugin)
+
+ passed_args = region_method.call_args
+ assert passed_args == call(sentinel.PLUGIN_OPTIONS)
+ # call_args[0] == positional args
+ assert passed_args[0][0] is sentinel.PLUGIN_OPTIONS
+
+ fail_args = aws_plugin.fail_aws.call_args
+ assert fail_args == call("sentinel.ERROR_MSG: sentinel.ERROR_EX")
+
+
+def test_get_aws_region_exception_msg_plugin(monkeypatch, aws_plugin, botocore_utils):
+ region_method = MagicMock(name="_aws_region")
+ monkeypatch.setattr(botocore_utils, "_aws_region", region_method)
+
+ exception_nested = AnsibleBotocoreError(message=sentinel.ERROR_MSG)
+ region_method.side_effect = exception_nested
+
+ with pytest.raises(FailException):
+ assert botocore_utils.get_aws_region(aws_plugin)
+
+ passed_args = region_method.call_args
+ assert passed_args == call(sentinel.PLUGIN_OPTIONS)
+ # call_args[0] == positional args
+ assert passed_args[0][0] is sentinel.PLUGIN_OPTIONS
+
+ fail_args = aws_plugin.fail_aws.call_args
+ assert fail_args == call("sentinel.ERROR_MSG")
diff --git a/ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/test_get_connection_info.py b/ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/test_get_connection_info.py
new file mode 100644
index 000000000..95c3ae54f
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugin_utils/botocore/test_get_connection_info.py
@@ -0,0 +1,83 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import sentinel
+
+import pytest
+
+import ansible_collections.amazon.aws.plugins.plugin_utils.botocore as utils_botocore
+from ansible_collections.amazon.aws.plugins.module_utils.exceptions import AnsibleBotocoreError
+
+
+class FailException(Exception):
+ pass
+
+
+@pytest.fixture
+def aws_plugin(monkeypatch):
+ aws_plugin = MagicMock()
+ aws_plugin.fail_aws.side_effect = FailException()
+ aws_plugin.get_options.return_value = sentinel.PLUGIN_OPTIONS
+ return aws_plugin
+
+
+@pytest.fixture
+def botocore_utils(monkeypatch):
+ return utils_botocore
+
+
+###############################################################
+# module_utils.botocore.get_aws_connection_info
+###############################################################
+def test_get_aws_connection_info_simple_plugin(monkeypatch, aws_plugin, botocore_utils):
+ connection_info_method = MagicMock(name="_aws_connection_info")
+ monkeypatch.setattr(botocore_utils, "_aws_connection_info", connection_info_method)
+ connection_info_method.return_value = sentinel.RETURNED_INFO
+
+ assert botocore_utils.get_aws_connection_info(aws_plugin) is sentinel.RETURNED_INFO
+ passed_args = connection_info_method.call_args
+ assert passed_args == call(sentinel.PLUGIN_OPTIONS)
+ # args[0]
+ assert passed_args[0][0] is sentinel.PLUGIN_OPTIONS
+
+
+def test_get_aws_connection_info_exception_nested_plugin(monkeypatch, aws_plugin, botocore_utils):
+ connection_info_method = MagicMock(name="_aws_connection_info")
+ monkeypatch.setattr(botocore_utils, "_aws_connection_info", connection_info_method)
+
+ exception_nested = AnsibleBotocoreError(message=sentinel.ERROR_MSG, exception=sentinel.ERROR_EX)
+ connection_info_method.side_effect = exception_nested
+
+ with pytest.raises(FailException):
+ botocore_utils.get_aws_connection_info(aws_plugin)
+
+ passed_args = connection_info_method.call_args
+ assert passed_args == call(sentinel.PLUGIN_OPTIONS)
+ # call_args[0] == positional args
+ assert passed_args[0][0] is sentinel.PLUGIN_OPTIONS
+
+ fail_args = aws_plugin.fail_aws.call_args
+ assert fail_args == call("sentinel.ERROR_MSG: sentinel.ERROR_EX")
+
+
+def test_get_aws_connection_info_exception_msg_plugin(monkeypatch, aws_plugin, botocore_utils):
+ connection_info_method = MagicMock(name="_aws_connection_info")
+ monkeypatch.setattr(botocore_utils, "_aws_connection_info", connection_info_method)
+
+ exception_nested = AnsibleBotocoreError(message=sentinel.ERROR_MSG)
+ connection_info_method.side_effect = exception_nested
+
+ with pytest.raises(FailException):
+ botocore_utils.get_aws_connection_info(aws_plugin)
+
+ passed_args = connection_info_method.call_args
+ assert passed_args == call(sentinel.PLUGIN_OPTIONS)
+ # call_args[0] == positional args
+ assert passed_args[0][0] is sentinel.PLUGIN_OPTIONS
+
+ fail_args = aws_plugin.fail_aws.call_args
+ assert fail_args == call("sentinel.ERROR_MSG")
diff --git a/ansible_collections/amazon/aws/tests/unit/plugin_utils/connection/__init__.py b/ansible_collections/amazon/aws/tests/unit/plugin_utils/connection/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugin_utils/connection/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/plugin_utils/connection/test_connection_base.py b/ansible_collections/amazon/aws/tests/unit/plugin_utils/connection/test_connection_base.py
new file mode 100644
index 000000000..8708cf045
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugin_utils/connection/test_connection_base.py
@@ -0,0 +1,49 @@
+# (c) 2023 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import sentinel
+
+import pytest
+
+from ansible.errors import AnsibleConnectionFailure
+
+import ansible_collections.amazon.aws.plugins.plugin_utils.connection as utils_connection
+
+
+# pylint: disable=abstract-class-instantiated
+def test_fail(monkeypatch):
+ monkeypatch.setattr(utils_connection.AWSConnectionBase, "__abstractmethods__", set())
+ monkeypatch.setattr(utils_connection.ConnectionBase, "__init__", MagicMock(name="__init__"))
+
+ connection_plugin = utils_connection.AWSConnectionBase()
+ with pytest.raises(AnsibleConnectionFailure, match=str(sentinel.ERROR_MSG)):
+ connection_plugin._do_fail(sentinel.ERROR_MSG)
+
+
+# pylint: disable=abstract-class-instantiated
+def test_init(monkeypatch):
+ kwargs = {"example": sentinel.KWARG}
+ require_aws_sdk = MagicMock(name="require_aws_sdk")
+ require_aws_sdk.return_value = sentinel.RETURNED_SDK
+
+ monkeypatch.setattr(utils_connection.AWSConnectionBase, "__abstractmethods__", set())
+ monkeypatch.setattr(utils_connection.ConnectionBase, "__init__", MagicMock(name="__init__"))
+ monkeypatch.setattr(utils_connection.AWSConnectionBase, "require_aws_sdk", require_aws_sdk)
+
+ connection_plugin = utils_connection.AWSConnectionBase(sentinel.PARAM_TERMS, sentinel.PARAM_VARS, **kwargs)
+ assert require_aws_sdk.call_args == call(botocore_version=None, boto3_version=None)
+
+ connection_plugin = utils_connection.AWSConnectionBase(
+ sentinel.PARAM_ONE,
+ sentinel.PARAM_TWO,
+ boto3_version=sentinel.PARAM_BOTO3,
+ botocore_version=sentinel.PARAM_BOTOCORE,
+ **kwargs,
+ )
+ assert require_aws_sdk.call_args == call(
+ botocore_version=sentinel.PARAM_BOTOCORE, boto3_version=sentinel.PARAM_BOTO3
+ )
diff --git a/ansible_collections/amazon/aws/tests/unit/plugin_utils/inventory/test_inventory_base.py b/ansible_collections/amazon/aws/tests/unit/plugin_utils/inventory/test_inventory_base.py
new file mode 100644
index 000000000..32eb3f7ab
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugin_utils/inventory/test_inventory_base.py
@@ -0,0 +1,67 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import patch
+from unittest.mock import sentinel
+
+import pytest
+
+import ansible.plugins.inventory as base_inventory
+
+import ansible_collections.amazon.aws.plugins.plugin_utils.inventory as utils_inventory
+
+
+@patch("ansible.plugins.inventory.BaseInventoryPlugin.parse", MagicMock)
+def test_parse(monkeypatch):
+ require_aws_sdk = MagicMock(name="require_aws_sdk")
+ require_aws_sdk.return_value = sentinel.RETURNED_SDK
+ config_data = MagicMock(name="_read_config_data")
+ config_data.return_value = sentinel.RETURNED_OPTIONS
+ frozen_credentials = MagicMock(name="_set_frozen_credentials")
+ frozen_credentials.return_value = sentinel.RETURNED_CREDENTIALS
+
+ inventory_plugin = utils_inventory.AWSInventoryBase()
+ monkeypatch.setattr(inventory_plugin, "require_aws_sdk", require_aws_sdk)
+ monkeypatch.setattr(inventory_plugin, "_read_config_data", config_data)
+ monkeypatch.setattr(inventory_plugin, "_set_frozen_credentials", frozen_credentials)
+
+ inventory_plugin.parse(sentinel.PARAM_INVENTORY, sentinel.PARAM_LOADER, sentinel.PARAM_PATH)
+ assert require_aws_sdk.call_args == call(botocore_version=None, boto3_version=None)
+ assert config_data.call_args == call(sentinel.PARAM_PATH)
+ assert frozen_credentials.call_args == call()
+
+
+@pytest.mark.parametrize(
+ "filename,result",
+ [
+ ("inventory_aws_ec2.yml", True),
+ ("inventory_aws_ec2.yaml", True),
+ ("inventory_aws_EC2.yaml", False),
+ ("inventory_Aws_ec2.yaml", False),
+ ("aws_ec2_inventory.yml", False),
+ ("aws_ec2.yml_inventory", False),
+ ("aws_ec2.yml", True),
+ ("aws_ec2.yaml", True),
+ ],
+)
+def test_inventory_verify_file(monkeypatch, filename, result):
+ base_verify = MagicMock(name="verify_file")
+ monkeypatch.setattr(base_inventory.BaseInventoryPlugin, "verify_file", base_verify)
+ inventory_plugin = utils_inventory.AWSInventoryBase()
+
+ # With INVENTORY_FILE_SUFFIXES not set, we should simply pass through the return from the base
+ base_verify.return_value = True
+ assert inventory_plugin.verify_file(filename) is True
+ base_verify.return_value = False
+ assert inventory_plugin.verify_file(filename) is False
+
+ # With INVENTORY_FILE_SUFFIXES set, we only return True of the base is good *and* the filename matches
+ inventory_plugin.INVENTORY_FILE_SUFFIXES = ("aws_ec2.yml", "aws_ec2.yaml")
+ base_verify.return_value = True
+ assert inventory_plugin.verify_file(filename) is result
+ base_verify.return_value = False
+ assert inventory_plugin.verify_file(filename) is False
diff --git a/ansible_collections/amazon/aws/tests/unit/plugin_utils/inventory/test_inventory_clients.py b/ansible_collections/amazon/aws/tests/unit/plugin_utils/inventory/test_inventory_clients.py
new file mode 100644
index 000000000..82831ac56
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugin_utils/inventory/test_inventory_clients.py
@@ -0,0 +1,103 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import sentinel
+
+import ansible_collections.amazon.aws.plugins.plugin_utils.base as utils_base
+import ansible_collections.amazon.aws.plugins.plugin_utils.inventory as utils_inventory
+
+# import ansible_collections.amazon.aws.plugins.module_utils.
+
+
+def test_client(monkeypatch):
+ super_client = MagicMock(name="client")
+ super_client.return_value = sentinel.SUPER_CLIENT
+ monkeypatch.setattr(utils_base.AWSPluginBase, "client", super_client)
+ inventory_plugin = utils_inventory.AWSInventoryBase()
+
+ client = inventory_plugin.client(sentinel.SERVICE_NAME)
+ assert super_client.call_args == call(sentinel.SERVICE_NAME)
+ assert client is sentinel.SUPER_CLIENT
+
+ client = inventory_plugin.client(sentinel.SERVICE_NAME, extra_arg=sentinel.EXTRA_ARG)
+ assert super_client.call_args == call(sentinel.SERVICE_NAME, extra_arg=sentinel.EXTRA_ARG)
+ assert client is sentinel.SUPER_CLIENT
+
+ frozen_creds = {"credential_one": sentinel.CREDENTIAL_ONE}
+ inventory_plugin._frozen_credentials = frozen_creds
+
+ client = inventory_plugin.client(sentinel.SERVICE_NAME)
+ assert super_client.call_args == call(sentinel.SERVICE_NAME, credential_one=sentinel.CREDENTIAL_ONE)
+ assert client is sentinel.SUPER_CLIENT
+
+ client = inventory_plugin.client(sentinel.SERVICE_NAME, extra_arg=sentinel.EXTRA_ARG)
+ assert super_client.call_args == call(
+ sentinel.SERVICE_NAME, credential_one=sentinel.CREDENTIAL_ONE, extra_arg=sentinel.EXTRA_ARG
+ )
+ assert client is sentinel.SUPER_CLIENT
+
+ client = inventory_plugin.client(sentinel.SERVICE_NAME, credential_one=sentinel.CREDENTIAL_ARG)
+ assert super_client.call_args == call(
+ sentinel.SERVICE_NAME,
+ credential_one=sentinel.CREDENTIAL_ARG,
+ )
+ assert client is sentinel.SUPER_CLIENT
+
+
+def test_resource(monkeypatch):
+ super_resource = MagicMock(name="resource")
+ super_resource.return_value = sentinel.SUPER_RESOURCE
+ monkeypatch.setattr(utils_base.AWSPluginBase, "resource", super_resource)
+ inventory_plugin = utils_inventory.AWSInventoryBase()
+
+ resource = inventory_plugin.resource(sentinel.SERVICE_NAME)
+ assert super_resource.call_args == call(sentinel.SERVICE_NAME)
+ assert resource is sentinel.SUPER_RESOURCE
+
+ resource = inventory_plugin.resource(sentinel.SERVICE_NAME, extra_arg=sentinel.EXTRA_ARG)
+ assert super_resource.call_args == call(sentinel.SERVICE_NAME, extra_arg=sentinel.EXTRA_ARG)
+ assert resource is sentinel.SUPER_RESOURCE
+
+ frozen_creds = {"credential_one": sentinel.CREDENTIAL_ONE}
+ inventory_plugin._frozen_credentials = frozen_creds
+
+ resource = inventory_plugin.resource(sentinel.SERVICE_NAME)
+ assert super_resource.call_args == call(sentinel.SERVICE_NAME, credential_one=sentinel.CREDENTIAL_ONE)
+ assert resource is sentinel.SUPER_RESOURCE
+
+ resource = inventory_plugin.resource(sentinel.SERVICE_NAME, extra_arg=sentinel.EXTRA_ARG)
+ assert super_resource.call_args == call(
+ sentinel.SERVICE_NAME, credential_one=sentinel.CREDENTIAL_ONE, extra_arg=sentinel.EXTRA_ARG
+ )
+ assert resource is sentinel.SUPER_RESOURCE
+
+ resource = inventory_plugin.resource(sentinel.SERVICE_NAME, credential_one=sentinel.CREDENTIAL_ARG)
+ assert super_resource.call_args == call(
+ sentinel.SERVICE_NAME,
+ credential_one=sentinel.CREDENTIAL_ARG,
+ )
+ assert resource is sentinel.SUPER_RESOURCE
+
+
+def test_all_clients(monkeypatch):
+ test_regions = ["us-east-1", "us-east-2"]
+ inventory_plugin = utils_inventory.AWSInventoryBase()
+ mock_client = MagicMock(name="client")
+ mock_client.return_value = sentinel.RETURN_CLIENT
+ monkeypatch.setattr(inventory_plugin, "client", mock_client)
+ boto3_regions = MagicMock(name="_boto3_regions")
+ boto3_regions.return_value = test_regions
+ monkeypatch.setattr(inventory_plugin, "_boto3_regions", boto3_regions)
+
+ regions = []
+ for client, region in inventory_plugin.all_clients(sentinel.ARG_SERVICE):
+ assert boto3_regions.call_args == call(service=sentinel.ARG_SERVICE)
+ assert mock_client.call_args == call(sentinel.ARG_SERVICE, region=region)
+ assert client is sentinel.RETURN_CLIENT
+ regions.append(region)
+
+ assert set(regions) == set(test_regions)
diff --git a/ansible_collections/amazon/aws/tests/unit/plugin_utils/lookup/__init__.py b/ansible_collections/amazon/aws/tests/unit/plugin_utils/lookup/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugin_utils/lookup/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/plugin_utils/lookup/test_lookup_base.py b/ansible_collections/amazon/aws/tests/unit/plugin_utils/lookup/test_lookup_base.py
new file mode 100644
index 000000000..7e90ecdeb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugin_utils/lookup/test_lookup_base.py
@@ -0,0 +1,48 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import sentinel
+
+import pytest
+
+from ansible.errors import AnsibleLookupError
+
+import ansible_collections.amazon.aws.plugins.plugin_utils.lookup as utils_lookup
+
+
+def test_fail_aws():
+ lookup_plugin = utils_lookup.AWSLookupBase()
+ with pytest.raises(AnsibleLookupError, match=str(sentinel.ERROR_MSG)):
+ lookup_plugin._do_fail(sentinel.ERROR_MSG)
+
+
+def test_run(monkeypatch):
+ kwargs = {"example": sentinel.KWARG}
+ require_aws_sdk = MagicMock(name="require_aws_sdk")
+ require_aws_sdk.return_value = sentinel.RETURNED_SDK
+ set_options = MagicMock(name="set_options")
+ set_options.return_value = sentinel.RETURNED_OPTIONS
+
+ lookup_plugin = utils_lookup.AWSLookupBase()
+ monkeypatch.setattr(lookup_plugin, "require_aws_sdk", require_aws_sdk)
+ monkeypatch.setattr(lookup_plugin, "set_options", set_options)
+
+ lookup_plugin.run(sentinel.PARAM_TERMS, sentinel.PARAM_VARS, **kwargs)
+ assert require_aws_sdk.call_args == call(botocore_version=None, boto3_version=None)
+ assert set_options.call_args == call(var_options=sentinel.PARAM_VARS, direct=kwargs)
+
+ lookup_plugin.run(
+ sentinel.PARAM_TERMS,
+ sentinel.PARAM_VARS,
+ boto3_version=sentinel.PARAM_BOTO3,
+ botocore_version=sentinel.PARAM_BOTOCORE,
+ **kwargs,
+ )
+ assert require_aws_sdk.call_args == call(
+ botocore_version=sentinel.PARAM_BOTOCORE, boto3_version=sentinel.PARAM_BOTO3
+ )
+ assert set_options.call_args == call(var_options=sentinel.PARAM_VARS, direct=kwargs)
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/__init__.py b/ansible_collections/amazon/aws/tests/unit/plugins/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/inventory/__init__.py b/ansible_collections/amazon/aws/tests/unit/plugins/inventory/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/inventory/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/inventory/test_aws_ec2.py b/ansible_collections/amazon/aws/tests/unit/plugins/inventory/test_aws_ec2.py
index 5386fe6c7..8cced1662 100644
--- a/ansible_collections/amazon/aws/tests/unit/plugins/inventory/test_aws_ec2.py
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/inventory/test_aws_ec2.py
@@ -17,96 +17,25 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import patch
import pytest
-import datetime
-from unittest.mock import Mock, MagicMock
+
+try:
+ import botocore
+except ImportError:
+ # Handled by HAS_BOTO3
+ pass
from ansible.errors import AnsibleError
-from ansible.parsing.dataloader import DataLoader
-from ansible_collections.amazon.aws.plugins.inventory.aws_ec2 import InventoryModule, instance_data_filter_to_boto_attr
-
-
-instances = {
- 'Instances': [
- {'Monitoring': {'State': 'disabled'},
- 'PublicDnsName': 'ec2-12-345-67-890.compute-1.amazonaws.com',
- 'State': {'Code': 16, 'Name': 'running'},
- 'EbsOptimized': False,
- 'LaunchTime': datetime.datetime(2017, 10, 31, 12, 59, 25),
- 'PublicIpAddress': '12.345.67.890',
- 'PrivateIpAddress': '098.76.54.321',
- 'ProductCodes': [],
- 'VpcId': 'vpc-12345678',
- 'StateTransitionReason': '',
- 'InstanceId': 'i-00000000000000000',
- 'EnaSupport': True,
- 'ImageId': 'ami-12345678',
- 'PrivateDnsName': 'ip-098-76-54-321.ec2.internal',
- 'KeyName': 'testkey',
- 'SecurityGroups': [{'GroupName': 'default', 'GroupId': 'sg-12345678'}],
- 'ClientToken': '',
- 'SubnetId': 'subnet-12345678',
- 'InstanceType': 't2.micro',
- 'NetworkInterfaces': [
- {'Status': 'in-use',
- 'MacAddress': '12:a0:50:42:3d:a4',
- 'SourceDestCheck': True,
- 'VpcId': 'vpc-12345678',
- 'Description': '',
- 'NetworkInterfaceId': 'eni-12345678',
- 'PrivateIpAddresses': [
- {'PrivateDnsName': 'ip-098-76-54-321.ec2.internal',
- 'PrivateIpAddress': '098.76.54.321',
- 'Primary': True,
- 'Association':
- {'PublicIp': '12.345.67.890',
- 'PublicDnsName': 'ec2-12-345-67-890.compute-1.amazonaws.com',
- 'IpOwnerId': 'amazon'}}],
- 'PrivateDnsName': 'ip-098-76-54-321.ec2.internal',
- 'Attachment':
- {'Status': 'attached',
- 'DeviceIndex': 0,
- 'DeleteOnTermination': True,
- 'AttachmentId': 'eni-attach-12345678',
- 'AttachTime': datetime.datetime(2017, 10, 31, 12, 59, 25)},
- 'Groups': [
- {'GroupName': 'default',
- 'GroupId': 'sg-12345678'}],
- 'Ipv6Addresses': [],
- 'OwnerId': '123456789012',
- 'PrivateIpAddress': '098.76.54.321',
- 'SubnetId': 'subnet-12345678',
- 'Association':
- {'PublicIp': '12.345.67.890',
- 'PublicDnsName': 'ec2-12-345-67-890.compute-1.amazonaws.com',
- 'IpOwnerId': 'amazon'}}],
- 'SourceDestCheck': True,
- 'Placement':
- {'Tenancy': 'default',
- 'GroupName': '',
- 'AvailabilityZone': 'us-east-1c'},
- 'Hypervisor': 'xen',
- 'BlockDeviceMappings': [
- {'DeviceName': '/dev/xvda',
- 'Ebs':
- {'Status': 'attached',
- 'DeleteOnTermination': True,
- 'VolumeId': 'vol-01234567890000000',
- 'AttachTime': datetime.datetime(2017, 10, 31, 12, 59, 26)}}],
- 'Architecture': 'x86_64',
- 'RootDeviceType': 'ebs',
- 'RootDeviceName': '/dev/xvda',
- 'VirtualizationType': 'hvm',
- 'Tags': [{'Value': 'test', 'Key': 'ansible'}, {'Value': 'aws_ec2', 'Key': 'Name'}],
- 'AmiLaunchIndex': 0}],
- 'ReservationId': 'r-01234567890000000',
- 'Groups': [],
- 'OwnerId': '123456789012'
-}
+
+from ansible_collections.amazon.aws.plugins.inventory.aws_ec2 import InventoryModule
+from ansible_collections.amazon.aws.plugins.inventory.aws_ec2 import _compile_values
+from ansible_collections.amazon.aws.plugins.inventory.aws_ec2 import _get_boto_attr_chain
+from ansible_collections.amazon.aws.plugins.inventory.aws_ec2 import _get_tag_hostname
+from ansible_collections.amazon.aws.plugins.inventory.aws_ec2 import _prepare_host_vars
@pytest.fixture()
@@ -140,236 +69,187 @@ def inventory():
return inventory
-def test_compile_values(inventory):
- found_value = instances['Instances'][0]
- chain_of_keys = instance_data_filter_to_boto_attr['instance.group-id']
- for attr in chain_of_keys:
- found_value = inventory._compile_values(found_value, attr)
- assert found_value == "sg-12345678"
-
-
-def test_get_boto_attr_chain(inventory):
- instance = instances['Instances'][0]
- assert inventory._get_boto_attr_chain('network-interface.addresses.private-ip-address', instance) == "098.76.54.321"
-
-
-def test_boto3_conn(inventory):
- inventory._options = {"aws_profile": "first_precedence",
- "aws_access_key": "test_access_key",
- "aws_secret_key": "test_secret_key",
- "aws_security_token": "test_security_token",
- "iam_role_arn": None}
- loader = DataLoader()
- inventory._set_credentials(loader)
- with pytest.raises(AnsibleError) as error_message:
- for _connection, _region in inventory._boto3_conn(regions=['us-east-1']):
- assert "Insufficient credentials found" in error_message
-
-
-def testget_all_hostnames_default(inventory):
- instance = instances['Instances'][0]
- assert inventory.get_all_hostnames(instance, hostnames=None) == ["ec2-12-345-67-890.compute-1.amazonaws.com", "ip-098-76-54-321.ec2.internal"]
-
-
-def testget_all_hostnames(inventory):
- hostnames = ['ip-address', 'dns-name']
- instance = instances['Instances'][0]
- assert inventory.get_all_hostnames(instance, hostnames) == ["12.345.67.890", "ec2-12-345-67-890.compute-1.amazonaws.com"]
-
-
-def testget_all_hostnames_dict(inventory):
- hostnames = [{'name': 'private-ip-address', 'separator': '_', 'prefix': 'tag:Name'}]
- instance = instances['Instances'][0]
- assert inventory.get_all_hostnames(instance, hostnames) == ["aws_ec2_098.76.54.321"]
-
-
-def testget_all_hostnames_with_2_tags(inventory):
- hostnames = ['tag:ansible', 'tag:Name']
- instance = instances['Instances'][0]
- assert inventory.get_all_hostnames(instance, hostnames) == ["test", "aws_ec2"]
-
-
-def test_get_preferred_hostname_default(inventory):
- instance = instances['Instances'][0]
- assert inventory._get_preferred_hostname(instance, hostnames=None) == "ec2-12-345-67-890.compute-1.amazonaws.com"
-
-
-def test_get_preferred_hostname(inventory):
- hostnames = ['ip-address', 'dns-name']
- instance = instances['Instances'][0]
- assert inventory._get_preferred_hostname(instance, hostnames) == "12.345.67.890"
-
-
-def test_get_preferred_hostname_dict(inventory):
- hostnames = [{'name': 'private-ip-address', 'separator': '_', 'prefix': 'tag:Name'}]
- instance = instances['Instances'][0]
- assert inventory._get_preferred_hostname(instance, hostnames) == "aws_ec2_098.76.54.321"
-
-
-def test_get_preferred_hostname_with_2_tags(inventory):
- hostnames = ['tag:ansible', 'tag:Name']
- instance = instances['Instances'][0]
- assert inventory._get_preferred_hostname(instance, hostnames) == "test"
-
-
-def test_set_credentials(inventory):
- inventory._options = {'aws_access_key': 'test_access_key',
- 'aws_secret_key': 'test_secret_key',
- 'aws_security_token': 'test_security_token',
- 'aws_profile': 'test_profile',
- 'iam_role_arn': 'arn:aws:iam::123456789012:role/test-role'}
- loader = DataLoader()
- inventory._set_credentials(loader)
-
- assert inventory.boto_profile == "test_profile"
- assert inventory.aws_access_key_id == "test_access_key"
- assert inventory.aws_secret_access_key == "test_secret_key"
- assert inventory.aws_security_token == "test_security_token"
- assert inventory.iam_role_arn == "arn:aws:iam::123456789012:role/test-role"
-
-
-def test_insufficient_credentials(inventory):
- inventory._options = {
- 'aws_access_key': None,
- 'aws_secret_key': None,
- 'aws_security_token': None,
- 'aws_profile': None,
- 'iam_role_arn': None
- }
- with pytest.raises(AnsibleError) as error_message:
- loader = DataLoader()
- inventory._set_credentials(loader)
- assert "Insufficient credentials found" in error_message
-
-
-def test_verify_file_bad_config(inventory):
- assert inventory.verify_file('not_aws_config.yml') is False
+@pytest.mark.parametrize(
+ "obj,expected",
+ [
+ (None, None),
+ ({}, None),
+ ({"GroupId": "test01"}, "test01"),
+ ({"GroupId": ["test01"]}, "test01"),
+ ({"GroupId": "test01"}, "test01"),
+ ({"GroupId": ["test01", "test02"]}, ["test01", "test02"]),
+ ([{"GroupId": ["test01", "test02"]}], ["test01", "test02"]),
+ ([{"GroupId": ["test01"]}], "test01"),
+ (
+ [{"GroupId": ["test01", "test02"]}, {"GroupId": ["test03", "test04"]}],
+ [["test01", "test02"], ["test03", "test04"]],
+ ),
+ (
+ ({"GroupId": ["test01", "test02"]}, {"GroupId": ["test03", "test04"]}),
+ [["test01", "test02"], ["test03", "test04"]],
+ ),
+ (({"GroupId": ["test01", "test02"]}, {}), ["test01", "test02"]),
+ ],
+)
+def test_compile_values(obj, expected):
+ assert _compile_values(obj, "GroupId") == expected
-def test_include_filters_with_no_filter(inventory):
- inventory._options = {
- 'filters': {},
- 'include_filters': [],
- }
- print(inventory.build_include_filters())
- assert inventory.build_include_filters() == [{}]
+@pytest.mark.parametrize(
+ "filter_name,expected",
+ [
+ ("ansible.aws.unexpected.file", "ansible.aws.unexpected.file"),
+ ("instance.group-id", "sg-0123456789"),
+ ("instance.group-name", "default"),
+ ("owner-id", "id-012345678L"),
+ ],
+)
+@patch("ansible_collections.amazon.aws.plugins.inventory.aws_ec2._compile_values")
+def test_get_boto_attr_chain(m_compile_values, filter_name, expected):
+ m_compile_values.side_effect = lambda obj, attr: obj.get(attr)
+ instance = {"SecurityGroups": {"GroupName": "default", "GroupId": "sg-0123456789"}, "OwnerId": "id-012345678L"}
-def test_include_filters_with_include_filters_only(inventory):
- inventory._options = {
- 'filters': {},
- 'include_filters': [{"foo": "bar"}],
- }
- assert inventory.build_include_filters() == [{"foo": "bar"}]
+ assert _get_boto_attr_chain(filter_name, instance) == expected
-def test_include_filters_with_filter_and_include_filters(inventory):
- inventory._options = {
- 'filters': {"from_filter": 1},
- 'include_filters': [{"from_include_filter": "bar"}],
+@pytest.mark.parametrize(
+ "hostnames,expected",
+ [
+ ([], "test-instance.ansible.com"),
+ (["private-dns-name"], "test-instance.localhost"),
+ (["tag:os_version"], "RHEL"),
+ (["tag:os_version", "dns-name"], "RHEL"),
+ ([{"name": "Name", "prefix": "Phase"}], "dev_test-instance-01"),
+ ([{"name": "Name", "prefix": "Phase", "separator": "-"}], "dev-test-instance-01"),
+ ([{"name": "Name", "prefix": "OSVersion", "separator": "-"}], "test-instance-01"),
+ ([{"name": "Name", "separator": "-"}], "test-instance-01"),
+ ([{"name": "Name", "prefix": "Phase"}, "private-dns-name"], "dev_test-instance-01"),
+ ([{"name": "Name", "prefix": "Phase"}, "tag:os_version"], "dev_test-instance-01"),
+ (["private-dns-name", "dns-name"], "test-instance.localhost"),
+ (["private-dns-name", {"name": "Name", "separator": "-"}], "test-instance.localhost"),
+ (["private-dns-name", "tag:os_version"], "test-instance.localhost"),
+ (["OSRelease"], None),
+ ],
+)
+@patch("ansible_collections.amazon.aws.plugins.inventory.aws_ec2._get_tag_hostname")
+@patch("ansible_collections.amazon.aws.plugins.inventory.aws_ec2._get_boto_attr_chain")
+def test_inventory_get_preferred_hostname(m_get_boto_attr_chain, m_get_tag_hostname, inventory, hostnames, expected):
+ instance = {
+ "Name": "test-instance-01",
+ "Phase": "dev",
+ "tag:os_version": ["RHEL", "CoreOS"],
+ "another_key": "another_value",
+ "dns-name": "test-instance.ansible.com",
+ "private-dns-name": "test-instance.localhost",
}
- print(inventory.build_include_filters())
- assert inventory.build_include_filters() == [
- {"from_filter": 1},
- {"from_include_filter": "bar"}]
+ inventory._sanitize_hostname = MagicMock()
+ inventory._sanitize_hostname.side_effect = lambda x: x
-def test_add_host_empty_hostnames(inventory):
- hosts = [
- {
- "Placement": {
- "AvailabilityZone": "us-east-1a",
- },
- "PublicDnsName": "ip-10-85-0-4.ec2.internal"
- },
- ]
- inventory._add_hosts(hosts, "aws_ec2", [])
- inventory.inventory.add_host.assert_called_with("ip-10-85-0-4.ec2.internal", group="aws_ec2")
+ m_get_boto_attr_chain.side_effect = lambda pref, instance: instance.get(pref)
+ m_get_tag_hostname.side_effect = lambda pref, instance: instance.get(pref)
+ assert expected == inventory._get_preferred_hostname(instance, hostnames)
-def test_add_host_with_hostnames_no_criteria(inventory):
- hosts = [{}]
- inventory._add_hosts(
- hosts, "aws_ec2", hostnames=["tag:Name", "private-dns-name", "dns-name"]
- )
- assert inventory.inventory.add_host.call_count == 0
+def test_inventory_get_preferred_hostname_failure(inventory):
+ instance = {}
+ hostnames = [{"value": "saome_value"}]
+ inventory._sanitize_hostname = MagicMock()
+ inventory._sanitize_hostname.side_effect = lambda x: x
-def test_add_host_with_hostnames_and_one_criteria(inventory):
- hosts = [
- {
- "Placement": {
- "AvailabilityZone": "us-east-1a",
- },
- "PublicDnsName": "sample-host",
- }
- ]
+ with pytest.raises(AnsibleError) as err:
+ inventory._get_preferred_hostname(instance, hostnames)
+ assert "A 'name' key must be defined in a hostnames dictionary." in err
- inventory._add_hosts(
- hosts, "aws_ec2", hostnames=["tag:Name", "private-dns-name", "dns-name"]
- )
- assert inventory.inventory.add_host.call_count == 1
- inventory.inventory.add_host.assert_called_with("sample-host", group="aws_ec2")
+@pytest.mark.parametrize("base_verify_file_return", [True, False])
+@pytest.mark.parametrize(
+ "filename,result",
+ [
+ ("inventory_aws_ec2.yml", True),
+ ("inventory_aws_ec2.yaml", True),
+ ("inventory_aws_EC2.yaml", False),
+ ("inventory_Aws_ec2.yaml", False),
+ ("aws_ec2_inventory.yml", False),
+ ("aws_ec2.yml_inventory", False),
+ ("aws_ec2.yml", True),
+ ("aws_ec2.yaml", True),
+ ],
+)
+@patch("ansible.plugins.inventory.BaseInventoryPlugin.verify_file")
+def test_inventory_verify_file(m_base_verify_file, inventory, base_verify_file_return, filename, result):
+ m_base_verify_file.return_value = base_verify_file_return
+ if not base_verify_file_return:
+ assert not inventory.verify_file(filename)
+ else:
+ assert result == inventory.verify_file(filename)
-def test_add_host_with_hostnames_and_two_matching_criteria(inventory):
- hosts = [
- {
- "Placement": {
- "AvailabilityZone": "us-east-1a",
- },
- "PublicDnsName": "name-from-PublicDnsName",
- "Tags": [{"Value": "name-from-tag-Name", "Key": "Name"}],
- }
- ]
- inventory._add_hosts(
- hosts, "aws_ec2", hostnames=["tag:Name", "private-dns-name", "dns-name"]
- )
- assert inventory.inventory.add_host.call_count == 1
- inventory.inventory.add_host.assert_called_with(
- "name-from-tag-Name", group="aws_ec2"
- )
+@pytest.mark.parametrize(
+ "preference,instance,expected",
+ [
+ ("tag:os_provider", {"Tags": []}, []),
+ ("tag:os_provider", {}, []),
+ ("tag:os_provider", {"Tags": [{"Key": "os_provider", "Value": "RedHat"}]}, ["RedHat"]),
+ ("tag:OS_Provider", {"Tags": [{"Key": "os_provider", "Value": "RedHat"}]}, []),
+ ("tag:tag:os_provider", {"Tags": [{"Key": "os_provider", "Value": "RedHat"}]}, []),
+ ("tag:os_provider=RedHat", {"Tags": [{"Key": "os_provider", "Value": "RedHat"}]}, ["os_provider_RedHat"]),
+ ("tag:os_provider=CoreOS", {"Tags": [{"Key": "os_provider", "Value": "RedHat"}]}, []),
+ (
+ "tag:os_provider=RedHat,os_release=7",
+ {"Tags": [{"Key": "os_provider", "Value": "RedHat"}, {"Key": "os_release", "Value": "8"}]},
+ ["os_provider_RedHat"],
+ ),
+ (
+ "tag:os_provider=RedHat,os_release=7",
+ {"Tags": [{"Key": "os_provider", "Value": "RedHat"}, {"Key": "os_release", "Value": "7"}]},
+ ["os_provider_RedHat", "os_release_7"],
+ ),
+ (
+ "tag:os_provider,os_release",
+ {"Tags": [{"Key": "os_provider", "Value": "RedHat"}, {"Key": "os_release", "Value": "7"}]},
+ ["RedHat", "7"],
+ ),
+ (
+ "tag:os_provider=RedHat,os_release",
+ {"Tags": [{"Key": "os_provider", "Value": "RedHat"}, {"Key": "os_release", "Value": "7"}]},
+ ["os_provider_RedHat", "7"],
+ ),
+ ],
+)
+def test_get_tag_hostname(preference, instance, expected):
+ assert expected == _get_tag_hostname(preference, instance)
-def test_add_host_with_hostnames_and_two_matching_criteria_and_allow_duplicated_hosts(
- inventory,
-):
- hosts = [
- {
- "Placement": {
- "AvailabilityZone": "us-east-1a",
+@pytest.mark.parametrize(
+ "_options, expected",
+ [
+ ({"filters": {}, "include_filters": []}, [{}]),
+ ({"filters": {}, "include_filters": [{"foo": "bar"}]}, [{"foo": "bar"}]),
+ (
+ {
+ "filters": {"from_filter": 1},
+ "include_filters": [{"from_include_filter": "bar"}],
},
- "PublicDnsName": "name-from-PublicDnsName",
- "Tags": [{"Value": "name-from-tag-Name", "Key": "Name"}],
- }
- ]
-
- inventory._add_hosts(
- hosts,
- "aws_ec2",
- hostnames=["tag:Name", "private-dns-name", "dns-name"],
- allow_duplicated_hosts=True,
- )
- assert inventory.inventory.add_host.call_count == 2
- inventory.inventory.add_host.assert_any_call(
- "name-from-PublicDnsName", group="aws_ec2"
- )
- inventory.inventory.add_host.assert_any_call("name-from-tag-Name", group="aws_ec2")
+ [{"from_filter": 1}, {"from_include_filter": "bar"}],
+ ),
+ ],
+)
+def test_inventory_build_include_filters(inventory, _options, expected):
+ inventory._options = _options
+ assert inventory.build_include_filters() == expected
-def test_sanitize_hostname(inventory):
- assert inventory._sanitize_hostname(1) == "1"
- assert inventory._sanitize_hostname("a:b") == "a_b"
- assert inventory._sanitize_hostname("a:/b") == "a__b"
- assert inventory._sanitize_hostname("example") == "example"
+@pytest.mark.parametrize("hostname,expected", [(1, "1"), ("a:b", "a_b"), ("a:/b", "a__b"), ("example", "example")])
+def test_sanitize_hostname(inventory, hostname, expected):
+ assert inventory._sanitize_hostname(hostname) == expected
def test_sanitize_hostname_legacy(inventory):
- inventory._sanitize_group_name = (
- inventory._legacy_script_compatible_group_sanitization
- )
+ inventory._sanitize_group_name = inventory._legacy_script_compatible_group_sanitization
assert inventory._sanitize_hostname("a:/b") == "a__b"
@@ -413,7 +293,6 @@ def test_sanitize_hostname_legacy(inventory):
],
)
def test_prepare_host_vars(
- inventory,
hostvars_prefix,
hostvars_suffix,
use_contrib_script_compatible_ec2_tag_keys,
@@ -425,7 +304,7 @@ def test_prepare_host_vars(
"Tags": [{"Key": "Name", "Value": "my-name"}],
}
assert (
- inventory.prepare_host_vars(
+ _prepare_host_vars(
original_host_vars,
hostvars_prefix,
hostvars_suffix,
@@ -472,43 +351,339 @@ def test_iter_entry(inventory):
assert entries[1][1]["a_tags_b"]["Name"] == "my-name"
-def test_query_empty(inventory):
- result = inventory._query("us-east-1", [], [], strict_permissions=True)
- assert result == {"aws_ec2": []}
+@pytest.mark.parametrize(
+ "include_filters,exclude_filters,instances_by_region,instances",
+ [
+ ([], [], [], []),
+ (
+ [4, 1, 2],
+ [],
+ [
+ [{"InstanceId": 4, "name": "instance-4"}],
+ [{"InstanceId": 1, "name": "instance-1"}],
+ [{"InstanceId": 2, "name": "instance-2"}],
+ ],
+ [
+ {"InstanceId": 1, "name": "instance-1"},
+ {"InstanceId": 2, "name": "instance-2"},
+ {"InstanceId": 4, "name": "instance-4"},
+ ],
+ ),
+ (
+ [],
+ [4, 1, 2],
+ [
+ [{"InstanceId": 4, "name": "instance-4"}],
+ [{"InstanceId": 1, "name": "instance-1"}],
+ [{"InstanceId": 2, "name": "instance-2"}],
+ ],
+ [],
+ ),
+ (
+ [1, 2],
+ [4],
+ [
+ [{"InstanceId": 4, "name": "instance-4"}],
+ [{"InstanceId": 1, "name": "instance-1"}],
+ [{"InstanceId": 2, "name": "instance-2"}],
+ ],
+ [{"InstanceId": 1, "name": "instance-1"}, {"InstanceId": 2, "name": "instance-2"}],
+ ),
+ (
+ [1, 2],
+ [1],
+ [
+ [{"InstanceId": 1, "name": "instance-1"}],
+ [{"InstanceId": 1, "name": "instance-1"}],
+ [{"InstanceId": 2, "name": "instance-2"}],
+ ],
+ [{"InstanceId": 2, "name": "instance-2"}],
+ ),
+ ],
+)
+def test_inventory_query(inventory, include_filters, exclude_filters, instances_by_region, instances):
+ inventory._get_instances_by_region = MagicMock()
+ inventory._get_instances_by_region.side_effect = instances_by_region
+
+ regions = ["us-east-1", "us-east-2"]
+ strict = False
+
+ params = {
+ "regions": regions,
+ "strict_permissions": strict,
+ "include_filters": [],
+ "exclude_filters": [],
+ "use_ssm_inventory": False,
+ }
+
+ for u in include_filters:
+ params["include_filters"].append({"Name": f"in_filters_{int(u)}", "Values": [u]})
+
+ for u in exclude_filters:
+ params["exclude_filters"].append({"Name": f"ex_filters_{int(u)}", "Values": [u]})
+
+ assert inventory._query(**params) == {"aws_ec2": instances}
+ if not instances_by_region:
+ inventory._get_instances_by_region.assert_not_called()
+
+
+@pytest.mark.parametrize(
+ "filters",
+ [
+ [],
+ [{"Name": "provider", "Values": "sample"}, {"Name": "instance-state-name", "Values": ["active"]}],
+ [
+ {"Name": "tags", "Values": "one_tag"},
+ ],
+ ],
+)
+@patch("ansible_collections.amazon.aws.plugins.inventory.aws_ec2._describe_ec2_instances")
+def test_inventory_get_instances_by_region(m_describe_ec2_instances, inventory, filters):
+ boto3_conn = [(MagicMock(), "us-east-1"), (MagicMock(), "us-east-2")]
+
+ inventory.all_clients = MagicMock()
+ inventory.all_clients.return_value = boto3_conn
+
+ m_describe_ec2_instances.side_effect = [
+ {
+ "Reservations": [
+ {
+ "OwnerId": "owner01",
+ "RequesterId": "requester01",
+ "ReservationId": "id-0123",
+ "Instances": [
+ {"name": "id-1-0", "os": "RedHat"},
+ {"name": "id-1-1", "os": "CoreOS"},
+ {"name": "id-1-2", "os": "Fedora"},
+ ],
+ },
+ {
+ "OwnerId": "owner01",
+ "ReservationId": "id-0456",
+ "Instances": [{"name": "id-2-0", "phase": "uat"}, {"name": "id-2-1", "phase": "prod"}],
+ },
+ ]
+ },
+ {
+ "Reservations": [
+ {
+ "OwnerId": "owner02",
+ "ReservationId": "id-0789",
+ "Instances": [
+ {"name": "id012345789", "tags": {"phase": "units"}},
+ ],
+ }
+ ],
+ "Metadata": {"Status": "active"},
+ },
+ ]
+
+ expected = [
+ {
+ "name": "id-1-0",
+ "os": "RedHat",
+ "OwnerId": "owner01",
+ "RequesterId": "requester01",
+ "ReservationId": "id-0123",
+ },
+ {
+ "name": "id-1-1",
+ "os": "CoreOS",
+ "OwnerId": "owner01",
+ "RequesterId": "requester01",
+ "ReservationId": "id-0123",
+ },
+ {
+ "name": "id-1-2",
+ "os": "Fedora",
+ "OwnerId": "owner01",
+ "RequesterId": "requester01",
+ "ReservationId": "id-0123",
+ },
+ {"name": "id-2-0", "phase": "uat", "OwnerId": "owner01", "ReservationId": "id-0456", "RequesterId": ""},
+ {"name": "id-2-1", "phase": "prod", "OwnerId": "owner01", "ReservationId": "id-0456", "RequesterId": ""},
+ {
+ "name": "id012345789",
+ "tags": {"phase": "units"},
+ "OwnerId": "owner02",
+ "ReservationId": "id-0789",
+ "RequesterId": "",
+ },
+ ]
+
+ default_filter = {"Name": "instance-state-name", "Values": ["running", "pending", "stopping", "stopped"]}
+ regions = ["us-east-2", "us-east-4"]
+
+ assert inventory._get_instances_by_region(regions, filters, False) == expected
+ inventory.all_clients.assert_called_with("ec2")
+
+ if not any((f["Name"] == "instance-state-name" for f in filters)):
+ filters.append(default_filter)
+
+ m_describe_ec2_instances.assert_has_calls([call(conn, filters) for conn, region in boto3_conn], any_order=True)
+
+
+@pytest.mark.parametrize("strict", [True, False])
+@pytest.mark.parametrize(
+ "error",
+ [
+ botocore.exceptions.ClientError(
+ {"Error": {"Code": 1, "Message": "Something went wrong"}, "ResponseMetadata": {"HTTPStatusCode": 404}},
+ "some_botocore_client_error",
+ ),
+ botocore.exceptions.ClientError(
+ {
+ "Error": {"Code": "UnauthorizedOperation", "Message": "Something went wrong"},
+ "ResponseMetadata": {"HTTPStatusCode": 403},
+ },
+ "some_botocore_client_error",
+ ),
+ botocore.exceptions.PaginationError(message="some pagination error"),
+ ],
+)
+@patch("ansible_collections.amazon.aws.plugins.inventory.aws_ec2._describe_ec2_instances")
+def test_inventory_get_instances_by_region_failures(m_describe_ec2_instances, inventory, strict, error):
+ inventory.all_clients = MagicMock()
+ inventory.all_clients.return_value = [(MagicMock(), "us-west-2")]
+ inventory.fail_aws = MagicMock()
+ inventory.fail_aws.side_effect = SystemExit(1)
+
+ m_describe_ec2_instances.side_effect = error
+ regions = ["us-east-2", "us-east-4"]
+
+ if (
+ isinstance(error, botocore.exceptions.ClientError)
+ and error.response["ResponseMetadata"]["HTTPStatusCode"] == 403
+ and not strict
+ ):
+ assert inventory._get_instances_by_region(regions, [], strict) == []
+ else:
+ with pytest.raises(SystemExit):
+ inventory._get_instances_by_region(regions, [], strict)
+
+
+@pytest.mark.parametrize(
+ "hostnames,expected",
+ [
+ ([], ["test-instance.ansible.com", "test-instance.localhost"]),
+ (["private-dns-name"], ["test-instance.localhost"]),
+ (["tag:os_version"], ["RHEL", "CoreOS"]),
+ (["tag:os_version", "dns-name"], ["RHEL", "CoreOS", "test-instance.ansible.com"]),
+ ([{"name": "Name", "prefix": "Phase"}], ["dev_test-instance-01"]),
+ ([{"name": "Name", "prefix": "Phase", "separator": "-"}], ["dev-test-instance-01"]),
+ ([{"name": "Name", "prefix": "OSVersion", "separator": "-"}], ["test-instance-01"]),
+ ([{"name": "Name", "separator": "-"}], ["test-instance-01"]),
+ (
+ [{"name": "Name", "prefix": "Phase"}, "private-dns-name"],
+ ["dev_test-instance-01", "test-instance.localhost"],
+ ),
+ ([{"name": "Name", "prefix": "Phase"}, "tag:os_version"], ["dev_test-instance-01", "RHEL", "CoreOS"]),
+ (["private-dns-name", {"name": "Name", "separator": "-"}], ["test-instance.localhost", "test-instance-01"]),
+ (["OSRelease"], []),
+ ],
+)
+@patch("ansible_collections.amazon.aws.plugins.inventory.aws_ec2._get_tag_hostname")
+@patch("ansible_collections.amazon.aws.plugins.inventory.aws_ec2._get_boto_attr_chain")
+def test_inventory_get_all_hostnames(m_get_boto_attr_chain, m_get_tag_hostname, inventory, hostnames, expected):
+ instance = {
+ "Name": "test-instance-01",
+ "Phase": "dev",
+ "tag:os_version": ["RHEL", "CoreOS"],
+ "another_key": "another_value",
+ "dns-name": "test-instance.ansible.com",
+ "private-dns-name": "test-instance.localhost",
+ }
+
+ inventory._sanitize_hostname = MagicMock()
+ inventory._sanitize_hostname.side_effect = lambda x: x
+
+ m_get_boto_attr_chain.side_effect = lambda pref, instance: instance.get(pref)
+ m_get_tag_hostname.side_effect = lambda pref, instance: instance.get(pref)
+ assert expected == inventory._get_all_hostnames(instance, hostnames)
-instance_foobar = {"InstanceId": "foobar"}
-instance_barfoo = {"InstanceId": "barfoo"}
+def test_inventory_get_all_hostnames_failure(inventory):
+ instance = {}
+ hostnames = [{"value": "some_value"}]
-def test_query_empty_include_only(inventory):
- inventory._get_instances_by_region = Mock(side_effect=[[instance_foobar]])
- result = inventory._query("us-east-1", [{"tag:Name": ["foobar"]}], [], strict_permissions=True)
- assert result == {"aws_ec2": [instance_foobar]}
+ with pytest.raises(AnsibleError) as err:
+ inventory._get_all_hostnames(instance, hostnames)
+ assert "A 'name' key must be defined in a hostnames dictionary." in str(err.value)
-def test_query_empty_include_ordered(inventory):
- inventory._get_instances_by_region = Mock(side_effect=[[instance_foobar], [instance_barfoo]])
- result = inventory._query("us-east-1", [{"tag:Name": ["foobar"]}, {"tag:Name": ["barfoo"]}], [], strict_permissions=True)
- assert result == {"aws_ec2": [instance_barfoo, instance_foobar]}
- inventory._get_instances_by_region.assert_called_with('us-east-1', [{'Name': 'tag:Name', 'Values': ['barfoo']}], True)
+@patch("ansible_collections.amazon.aws.plugins.inventory.aws_ec2._get_ssm_information")
+def test_inventory__add_ssm_information(m_get_ssm_information, inventory):
+ instances = [
+ {"InstanceId": "i-001", "Name": "first-instance"},
+ {"InstanceId": "i-002", "Name": "another-instance"},
+ ]
+ result = {
+ "StatusCode": 200,
+ "Entities": [
+ {"Id": "i-001", "Data": {}},
+ {
+ "Id": "i-002",
+ "Data": {
+ "AWS:InstanceInformation": {
+ "Content": [{"os_type": "Linux", "os_name": "Fedora", "os_version": 37}]
+ }
+ },
+ },
+ ],
+ }
+ m_get_ssm_information.return_value = result
-def test_query_empty_include_exclude(inventory):
- inventory._get_instances_by_region = Mock(side_effect=[[instance_foobar], [instance_foobar]])
- result = inventory._query("us-east-1", [{"tag:Name": ["foobar"]}], [{"tag:Name": ["foobar"]}], strict_permissions=True)
- assert result == {"aws_ec2": []}
+ connection = MagicMock()
+ expected = [
+ {"InstanceId": "i-001", "Name": "first-instance"},
+ {
+ "InstanceId": "i-002",
+ "Name": "another-instance",
+ "SsmInventory": {"os_type": "Linux", "os_name": "Fedora", "os_version": 37},
+ },
+ ]
+
+ inventory._add_ssm_information(connection, instances)
+ assert expected == instances
+
+ filters = [{"Key": "AWS:InstanceInformation.InstanceId", "Values": [x["InstanceId"] for x in instances]}]
+ m_get_ssm_information.assert_called_once_with(connection, filters)
+
+
+@patch("ansible_collections.amazon.aws.plugins.inventory.aws_ec2._get_ssm_information")
+def test_inventory__get_multiple_ssm_inventories(m_get_ssm_information, inventory):
+ instances = [{"InstanceId": f"i-00{i}", "Name": f"instance {i}"} for i in range(41)]
+ result = {
+ "StatusCode": 200,
+ "Entities": [
+ {
+ "Id": f"i-00{i}",
+ "Data": {
+ "AWS:InstanceInformation": {
+ "Content": [{"os_type": "Linux", "os_name": "Fedora", "os_version": 37}]
+ }
+ },
+ }
+ for i in range(41)
+ ],
+ }
+ m_get_ssm_information.return_value = result
+
+ connection = MagicMock()
-def test_include_extra_api_calls_deprecated(inventory):
- inventory.display.deprecate = Mock()
- inventory._read_config_data = Mock()
- inventory._set_credentials = Mock()
- inventory._query = Mock(return_value=[])
+ expected = [
+ {
+ "InstanceId": f"i-00{i}",
+ "Name": f"instance {i}",
+ "SsmInventory": {"os_type": "Linux", "os_name": "Fedora", "os_version": 37},
+ }
+ for i in range(41)
+ ]
- inventory.parse(inventory=[], loader=None, path=None)
- assert inventory.display.deprecate.call_count == 0
+ inventory._add_ssm_information(connection, instances)
+ assert expected == instances
- inventory._options["include_extra_api_calls"] = True
- inventory.parse(inventory=[], loader=None, path=None)
- assert inventory.display.deprecate.call_count == 1
+ assert 2 == m_get_ssm_information.call_count
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/inventory/test_aws_rds.py b/ansible_collections/amazon/aws/tests/unit/plugins/inventory/test_aws_rds.py
new file mode 100644
index 000000000..53be24a48
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/inventory/test_aws_rds.py
@@ -0,0 +1,674 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2022 Aubin Bikouo <@abikouo>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+import copy
+import random
+import string
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import patch
+
+import pytest
+
+try:
+ import botocore
+except ImportError:
+ # Handled by HAS_BOTO3
+ pass
+
+from ansible.errors import AnsibleError
+
+from ansible_collections.amazon.aws.plugins.inventory.aws_rds import InventoryModule
+from ansible_collections.amazon.aws.plugins.inventory.aws_rds import _add_tags_for_rds_hosts
+from ansible_collections.amazon.aws.plugins.inventory.aws_rds import _describe_db_clusters
+from ansible_collections.amazon.aws.plugins.inventory.aws_rds import _describe_db_instances
+from ansible_collections.amazon.aws.plugins.inventory.aws_rds import _find_hosts_with_valid_statuses
+from ansible_collections.amazon.aws.plugins.inventory.aws_rds import _get_rds_hostname
+from ansible_collections.amazon.aws.plugins.inventory.aws_rds import ansible_dict_to_boto3_filter_list
+from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
+
+if not HAS_BOTO3:
+ pytestmark = pytest.mark.skip("test_aws_rds.py requires the python modules 'boto3' and 'botocore'")
+
+
+def make_clienterror_exception(code="AccessDenied"):
+ return botocore.exceptions.ClientError(
+ {
+ "Error": {"Code": code, "Message": "User is not authorized to perform: xxx on resource: user yyyy"},
+ "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
+ },
+ "getXXX",
+ )
+
+
+@pytest.fixture()
+def inventory():
+ inventory = InventoryModule()
+ inventory.inventory = MagicMock()
+ inventory._populate_host_vars = MagicMock()
+
+ inventory.all_clients = MagicMock()
+ inventory.get_option = MagicMock()
+
+ inventory._set_composite_vars = MagicMock()
+ inventory._add_host_to_composed_groups = MagicMock()
+ inventory._add_host_to_keyed_groups = MagicMock()
+ inventory._read_config_data = MagicMock()
+ inventory._set_credentials = MagicMock()
+
+ inventory.get_cache_key = MagicMock()
+
+ inventory._cache = {}
+ return inventory
+
+
+@pytest.fixture()
+def connection():
+ conn = MagicMock()
+ return conn
+
+
+@pytest.mark.parametrize(
+ "suffix,result",
+ [
+ ("aws_rds.yml", True),
+ ("aws_rds.yaml", True),
+ ("aws_RDS.yml", False),
+ ("AWS_rds.yaml", False),
+ ],
+)
+def test_inventory_verify_file_suffix(inventory, suffix, result, tmp_path):
+ test_dir = tmp_path / "test_aws_rds"
+ test_dir.mkdir()
+ inventory_file = "inventory" + suffix
+ inventory_file = test_dir / inventory_file
+ inventory_file.write_text("my inventory")
+ assert result == inventory.verify_file(str(inventory_file))
+
+
+def test_inventory_verify_file_with_missing_file(inventory):
+ inventory_file = "this_file_does_not_exist_aws_rds.yml"
+ assert not inventory.verify_file(inventory_file)
+
+
+def generate_random_string(with_digits=True, with_punctuation=True, length=16):
+ data = string.ascii_letters
+ if with_digits:
+ data += string.digits
+ if with_punctuation:
+ data += string.punctuation
+ return "".join([random.choice(data) for i in range(length)])
+
+
+@pytest.mark.parametrize(
+ "hosts,statuses,expected",
+ [
+ (
+ [
+ {"host": "host1", "DBInstanceStatus": "Available", "Status": "active"},
+ {"host": "host2", "DBInstanceStatus": "Creating", "Status": "active"},
+ {"host": "host3", "DBInstanceStatus": "Stopped", "Status": "active"},
+ {"host": "host4", "DBInstanceStatus": "Configuring", "Status": "active"},
+ ],
+ ["Available"],
+ [{"host": "host1", "DBInstanceStatus": "Available", "Status": "active"}],
+ ),
+ (
+ [
+ {"host": "host1", "DBInstanceStatus": "Available", "Status": "active"},
+ {"host": "host2", "DBInstanceStatus": "Creating", "Status": "active"},
+ {"host": "host3", "DBInstanceStatus": "Stopped", "Status": "active"},
+ {"host": "host4", "DBInstanceStatus": "Configuring", "Status": "active"},
+ ],
+ ["all"],
+ [
+ {"host": "host1", "DBInstanceStatus": "Available", "Status": "active"},
+ {"host": "host2", "DBInstanceStatus": "Creating", "Status": "active"},
+ {"host": "host3", "DBInstanceStatus": "Stopped", "Status": "active"},
+ {"host": "host4", "DBInstanceStatus": "Configuring", "Status": "active"},
+ ],
+ ),
+ (
+ [
+ {"host": "host1", "DBInstanceStatus": "Available", "Status": "active"},
+ {"host": "host2", "DBInstanceStatus": "Creating", "Status": "Available"},
+ {"host": "host3", "DBInstanceStatus": "Stopped", "Status": "active"},
+ {"host": "host4", "DBInstanceStatus": "Configuring", "Status": "active"},
+ ],
+ ["Available"],
+ [
+ {"host": "host1", "DBInstanceStatus": "Available", "Status": "active"},
+ {"host": "host2", "DBInstanceStatus": "Creating", "Status": "Available"},
+ ],
+ ),
+ ],
+)
+def test_find_hosts_with_valid_statuses(hosts, statuses, expected):
+ assert expected == _find_hosts_with_valid_statuses(hosts, statuses)
+
+
+@pytest.mark.parametrize(
+ "host,expected",
+ [
+ ({"DBClusterIdentifier": "my_cluster_id"}, "my_cluster_id"),
+ ({"DBClusterIdentifier": "my_cluster_id", "DBInstanceIdentifier": "my_instance_id"}, "my_instance_id"),
+ ],
+)
+def test_get_rds_hostname(host, expected):
+ assert expected == _get_rds_hostname(host)
+
+
+@pytest.mark.parametrize("hosts", ["", "host1", "host2,host3", "host2,host3,host1"])
+@patch("ansible_collections.amazon.aws.plugins.inventory.aws_rds._get_rds_hostname")
+def test_inventory_format_inventory(m_get_rds_hostname, inventory, hosts):
+ hosts_vars = {
+ "host1": {"var10": "value10"},
+ "host2": {"var20": "value20", "var21": "value21"},
+ "host3": {"var30": "value30", "var31": "value31", "var32": "value32"},
+ }
+
+ m_get_rds_hostname.side_effect = lambda h: h["name"]
+
+ class _inventory_host(object):
+ def __init__(self, name, host_vars):
+ self.name = name
+ self.vars = host_vars
+
+ inventory.inventory = MagicMock()
+ inventory.inventory.get_host.side_effect = lambda x: _inventory_host(name=x, host_vars=hosts_vars.get(x))
+
+ hosts = [{"name": x} for x in hosts.split(",") if x]
+ expected = {
+ "_meta": {"hostvars": {x["name"]: hosts_vars.get(x["name"]) for x in hosts}},
+ "aws_rds": {"hosts": [x["name"] for x in hosts]},
+ }
+
+ assert expected == inventory._format_inventory(hosts)
+ if hosts == []:
+ m_get_rds_hostname.assert_not_called()
+
+
+@pytest.mark.parametrize("length", range(0, 10, 2))
+def test_inventory_populate(inventory, length):
+ group = "aws_rds"
+ hosts = [f"host_{int(i)}" for i in range(length)]
+
+ inventory._add_hosts = MagicMock()
+ inventory._populate(hosts=hosts)
+
+ inventory.inventory.add_group.assert_called_with("aws_rds")
+
+ if len(hosts) == 0:
+ inventory.inventory._add_hosts.assert_not_called()
+ inventory.inventory.add_child.assert_not_called()
+ else:
+ inventory._add_hosts.assert_called_with(hosts=hosts, group=group)
+ inventory.inventory.add_child.assert_called_with("all", group)
+
+
+def test_inventory_populate_from_source(inventory):
+ source_data = {
+ "_meta": {
+ "hostvars": {
+ "host_1_0": {"var10": "value10"},
+ "host_2": {"var2": "value2"},
+ "host_3": {"var3": ["value30", "value31", "value32"]},
+ }
+ },
+ "all": {"hosts": ["host_1_0", "host_1_1", "host_2", "host_3"]},
+ "aws_host_1": {"hosts": ["host_1_0", "host_1_1"]},
+ "aws_host_2": {"hosts": ["host_2"]},
+ "aws_host_3": {"hosts": ["host_3"]},
+ }
+
+ inventory._populate_from_source(source_data)
+ inventory.inventory.add_group.assert_has_calls(
+ [
+ call("aws_host_1"),
+ call("aws_host_2"),
+ call("aws_host_3"),
+ ],
+ any_order=True,
+ )
+ inventory.inventory.add_child.assert_has_calls(
+ [
+ call("all", "aws_host_1"),
+ call("all", "aws_host_2"),
+ call("all", "aws_host_3"),
+ ],
+ any_order=True,
+ )
+
+ inventory._populate_host_vars.assert_has_calls(
+ [
+ call(["host_1_0"], {"var10": "value10"}, "aws_host_1"),
+ call(["host_1_1"], {}, "aws_host_1"),
+ call(["host_2"], {"var2": "value2"}, "aws_host_2"),
+ call(["host_3"], {"var3": ["value30", "value31", "value32"]}, "aws_host_3"),
+ ],
+ any_order=True,
+ )
+
+
+@pytest.mark.parametrize("strict", [True, False])
+def test_add_tags_for_rds_hosts_with_no_hosts(connection, strict):
+ hosts = []
+
+ _add_tags_for_rds_hosts(connection, hosts, strict)
+ connection.list_tags_for_resource.assert_not_called()
+
+
+def test_add_tags_for_rds_hosts_with_hosts(connection):
+ hosts = [
+ {"DBInstanceArn": "dbarn1"},
+ {"DBInstanceArn": "dbarn2"},
+ {"DBClusterArn": "clusterarn1"},
+ ]
+
+ rds_hosts_tags = {
+ "dbarn1": {"TagList": ["tag1=dbarn1", "phase=units"]},
+ "dbarn2": {"TagList": ["tag2=dbarn2", "collection=amazon.aws"]},
+ "clusterarn1": {"TagList": ["tag1=clusterarn1", "tool=ansible-test"]},
+ }
+ connection.list_tags_for_resource.side_effect = lambda **kwargs: rds_hosts_tags.get(kwargs.get("ResourceName"))
+
+ _add_tags_for_rds_hosts(connection, hosts, strict=False)
+
+ assert hosts == [
+ {"DBInstanceArn": "dbarn1", "Tags": ["tag1=dbarn1", "phase=units"]},
+ {"DBInstanceArn": "dbarn2", "Tags": ["tag2=dbarn2", "collection=amazon.aws"]},
+ {"DBClusterArn": "clusterarn1", "Tags": ["tag1=clusterarn1", "tool=ansible-test"]},
+ ]
+
+
+def test_add_tags_for_rds_hosts_with_failure_not_strict(connection):
+ hosts = [{"DBInstanceArn": "dbarn1"}]
+
+ connection.list_tags_for_resource.side_effect = make_clienterror_exception()
+
+ _add_tags_for_rds_hosts(connection, hosts, strict=False)
+
+ assert hosts == [
+ {"DBInstanceArn": "dbarn1", "Tags": []},
+ ]
+
+
+def test_add_tags_for_rds_hosts_with_failure_strict(connection):
+ hosts = [{"DBInstanceArn": "dbarn1"}]
+
+ connection.list_tags_for_resource.side_effect = make_clienterror_exception()
+
+ with pytest.raises(botocore.exceptions.ClientError):
+ _add_tags_for_rds_hosts(connection, hosts, strict=True)
+
+
+ADD_TAGS_FOR_RDS_HOSTS = "ansible_collections.amazon.aws.plugins.inventory.aws_rds._add_tags_for_rds_hosts"
+
+
+@patch(ADD_TAGS_FOR_RDS_HOSTS)
+def test_describe_db_clusters(m_add_tags_for_rds_hosts, connection):
+ db_cluster = {
+ "DatabaseName": "my_sample_db",
+ "DBClusterIdentifier": "db_id_01",
+ "Status": "Stopped",
+ "DbClusterResourceId": "cluster_resource_id",
+ "DBClusterArn": "arn:xxx:xxxx",
+ "DeletionProtection": True,
+ }
+
+ connection.describe_db_clusters.return_value = {"DBClusters": [db_cluster]}
+
+ filters = generate_random_string(with_punctuation=False)
+ strict = False
+
+ result = _describe_db_clusters(connection=connection, filters=filters, strict=strict)
+
+ assert result == [db_cluster]
+
+ m_add_tags_for_rds_hosts.assert_called_with(connection, result, strict)
+
+
+@pytest.mark.parametrize("strict", [True, False])
+@patch(ADD_TAGS_FOR_RDS_HOSTS)
+def test_describe_db_clusters_with_access_denied(m_add_tags_for_rds_hosts, connection, strict):
+ connection.describe_db_clusters.side_effect = make_clienterror_exception()
+
+ filters = generate_random_string(with_punctuation=False)
+
+ if strict:
+ with pytest.raises(AnsibleError):
+ _describe_db_clusters(connection=connection, filters=filters, strict=strict)
+ else:
+ assert _describe_db_clusters(connection=connection, filters=filters, strict=strict) == []
+
+ m_add_tags_for_rds_hosts.assert_not_called()
+
+
+@patch(ADD_TAGS_FOR_RDS_HOSTS)
+def test_describe_db_clusters_with_client_error(m_add_tags_for_rds_hosts, connection):
+ connection.describe_db_clusters.side_effect = make_clienterror_exception(code="Unknown")
+
+ filters = generate_random_string(with_punctuation=False)
+ with pytest.raises(AnsibleError):
+ _describe_db_clusters(connection=connection, filters=filters, strict=False)
+
+ m_add_tags_for_rds_hosts.assert_not_called()
+
+
+@patch(ADD_TAGS_FOR_RDS_HOSTS)
+def test_describe_db_instances(m_add_tags_for_rds_hosts, connection):
+ db_instance = {
+ "DBInstanceIdentifier": "db_id_01",
+ "Status": "Stopped",
+ "DBName": "my_sample_db_01",
+ "DBClusterIdentifier": "db_cluster_001",
+ "DBInstanceArn": "arn:db:xxxx:xxxx:xxxx",
+ "Engine": "mysql",
+ }
+
+ conn_paginator = MagicMock()
+ paginate = MagicMock()
+
+ connection.get_paginator.return_value = conn_paginator
+ conn_paginator.paginate.return_value = paginate
+
+ paginate.build_full_result.return_value = {"DBInstances": [db_instance]}
+
+ filters = generate_random_string(with_punctuation=False)
+ strict = False
+
+ result = _describe_db_instances(connection=connection, filters=filters, strict=strict)
+
+ assert result == [db_instance]
+
+ m_add_tags_for_rds_hosts.assert_called_with(connection, result, strict)
+ connection.get_paginator.assert_called_with("describe_db_instances")
+ conn_paginator.paginate.assert_called_with(Filters=filters)
+
+
+DESCRIBE_DB_INSTANCES = "ansible_collections.amazon.aws.plugins.inventory.aws_rds._describe_db_instances"
+DESCRIBE_DB_CLUSTERS = "ansible_collections.amazon.aws.plugins.inventory.aws_rds._describe_db_clusters"
+FIND_HOSTS_WITH_VALID_STATUSES = (
+ "ansible_collections.amazon.aws.plugins.inventory.aws_rds._find_hosts_with_valid_statuses"
+)
+
+
+@pytest.mark.parametrize("gather_clusters", [True, False])
+@pytest.mark.parametrize("regions", range(1, 5))
+@patch(DESCRIBE_DB_INSTANCES)
+@patch(DESCRIBE_DB_CLUSTERS)
+@patch(FIND_HOSTS_WITH_VALID_STATUSES)
+def test_inventory_get_all_db_hosts(
+ m_find_hosts, m_describe_db_clusters, m_describe_db_instances, inventory, gather_clusters, regions
+):
+ params = {
+ "gather_clusters": gather_clusters,
+ "regions": [f"us-east-{int(i)}" for i in range(regions)],
+ "instance_filters": generate_random_string(),
+ "cluster_filters": generate_random_string(),
+ "strict": random.choice((True, False)),
+ "statuses": [random.choice(["Available", "Stopped", "Running", "Creating"]) for i in range(3)],
+ }
+
+ connections = [MagicMock() for i in range(regions)]
+
+ inventory.all_clients.return_value = [(connections[i], f"us-east-{int(i)}") for i in range(regions)]
+
+ ids = list(reversed(range(regions)))
+ db_instances = [{"DBInstanceIdentifier": f"db_00{int(i)}"} for i in ids]
+ db_clusters = [{"DBClusterIdentifier": f"cluster_00{int(i)}"} for i in ids]
+
+ m_describe_db_instances.side_effect = [[i] for i in db_instances]
+ m_describe_db_clusters.side_effect = [[i] for i in db_clusters]
+
+ result = list(sorted(db_instances, key=lambda x: x["DBInstanceIdentifier"]))
+ if gather_clusters:
+ result += list(sorted(db_clusters, key=lambda x: x["DBClusterIdentifier"]))
+
+ m_find_hosts.return_value = result
+
+ assert result == inventory._get_all_db_hosts(**params)
+ inventory.all_clients.assert_called_with("rds")
+ m_describe_db_instances.assert_has_calls(
+ [call(connections[i], params["instance_filters"], strict=params["strict"]) for i in range(regions)]
+ )
+
+ if gather_clusters:
+ m_describe_db_clusters.assert_has_calls(
+ [call(connections[i], params["cluster_filters"], strict=params["strict"]) for i in range(regions)]
+ )
+
+ m_find_hosts.assert_called_with(result, params["statuses"])
+
+
+@pytest.mark.parametrize("hostvars_prefix", [True])
+@pytest.mark.parametrize("hostvars_suffix", [True])
+@patch("ansible_collections.amazon.aws.plugins.inventory.aws_rds._get_rds_hostname")
+def test_inventory_add_hosts(m_get_rds_hostname, inventory, hostvars_prefix, hostvars_suffix):
+ _options = {
+ "strict": random.choice((False, True)),
+ "compose": random.choice((False, True)),
+ "keyed_groups": "keyed_group_test_inventory_add_hosts",
+ "groups": ["all", "test_inventory_add_hosts"],
+ }
+
+ if hostvars_prefix:
+ _options["hostvars_prefix"] = f"prefix_{generate_random_string(length=8, with_punctuation=False)}"
+ if hostvars_suffix:
+ _options["hostvars_suffix"] = f"suffix_{generate_random_string(length=8, with_punctuation=False)}"
+
+ def _get_option_side_effect(x):
+ return _options.get(x)
+
+ inventory.get_option.side_effect = _get_option_side_effect
+
+ m_get_rds_hostname.side_effect = lambda h: (
+ h["DBInstanceIdentifier"] if "DBInstanceIdentifier" in h else h["DBClusterIdentifier"]
+ )
+
+ hosts = [
+ {
+ "DBInstanceIdentifier": "db_i_001",
+ "Tags": [{"Key": "Name", "Value": "db_001"}, {"Key": "RunningEngine", "Value": "mysql"}],
+ "availability_zone": "us-east-1a",
+ },
+ {
+ "DBInstanceIdentifier": "db_i_002",
+ "Tags": [{"Key": "ClusterName", "Value": "test_cluster"}, {"Key": "RunningOS", "Value": "CoreOS"}],
+ },
+ {
+ "DBClusterIdentifier": "test_cluster",
+ "Tags": [{"Key": "CluserVersionOrigin", "Value": "2.0"}, {"Key": "Provider", "Value": "RedHat"}],
+ },
+ {
+ "DBClusterIdentifier": "another_cluster",
+ "Tags": [{"Key": "TestingPurpose", "Value": "Ansible"}],
+ "availability_zones": ["us-west-1a", "us-east-1b"],
+ },
+ ]
+
+ group = f"test_add_hosts_group_{generate_random_string(length=10, with_punctuation=False)}"
+ inventory._add_hosts(hosts, group)
+
+ m_get_rds_hostname.assert_has_calls([call(h) for h in hosts], any_order=True)
+
+ hosts_names = ["db_i_001", "db_i_002", "test_cluster", "another_cluster"]
+ inventory.inventory.add_host.assert_has_calls([call(name, group=group) for name in hosts_names], any_order=True)
+
+ camel_hosts = [
+ {
+ "db_instance_identifier": "db_i_001",
+ "tags": {"Name": "db_001", "RunningEngine": "mysql"},
+ "availability_zone": "us-east-1a",
+ "region": "us-east-1",
+ },
+ {"db_instance_identifier": "db_i_002", "tags": {"ClusterName": "test_cluster", "RunningOS": "CoreOS"}},
+ {"db_cluster_identifier": "test_cluster", "tags": {"CluserVersionOrigin": "2.0", "Provider": "RedHat"}},
+ {
+ "db_cluster_identifier": "another_cluster",
+ "tags": {"TestingPurpose": "Ansible"},
+ "availability_zones": ["us-west-1a", "us-east-1b"],
+ "region": "us-west-1",
+ },
+ ]
+
+ set_variable_calls = []
+ for i in range(len(camel_hosts)):
+ for var, value in camel_hosts[i].items():
+ if hostvars_prefix:
+ var = _options["hostvars_prefix"] + var
+ if hostvars_suffix:
+ var += _options["hostvars_suffix"]
+ set_variable_calls.append(call(hosts_names[i], var, value))
+
+ inventory.inventory.set_variable.assert_has_calls(set_variable_calls, any_order=True)
+
+ if hostvars_prefix or hostvars_suffix:
+ tmp = []
+ for host in camel_hosts:
+ new_host = copy.deepcopy(host)
+ for key in host:
+ new_key = key
+ if hostvars_prefix:
+ new_key = _options["hostvars_prefix"] + new_key
+ if hostvars_suffix:
+ new_key += _options["hostvars_suffix"]
+ new_host[new_key] = host[key]
+ tmp.append(new_host)
+ camel_hosts = tmp
+
+ inventory._set_composite_vars.assert_has_calls(
+ [
+ call(_options["compose"], camel_hosts[i], hosts_names[i], strict=_options["strict"])
+ for i in range(len(camel_hosts))
+ ],
+ any_order=True,
+ )
+ inventory._add_host_to_composed_groups.assert_has_calls(
+ [
+ call(_options["groups"], camel_hosts[i], hosts_names[i], strict=_options["strict"])
+ for i in range(len(camel_hosts))
+ ],
+ any_order=True,
+ )
+ inventory._add_host_to_keyed_groups.assert_has_calls(
+ [
+ call(_options["keyed_groups"], camel_hosts[i], hosts_names[i], strict=_options["strict"])
+ for i in range(len(camel_hosts))
+ ],
+ any_order=True,
+ )
+
+
# Dotted path to the base class's parse() method; patched in test_inventory_parse
# below so the aws_rds plugin's own parse() logic runs without the real
# AWSInventoryBase implementation.
BASE_INVENTORY_PARSE = "ansible_collections.amazon.aws.plugins.inventory.aws_rds.AWSInventoryBase.parse"
+
+
@pytest.mark.parametrize("include_clusters", [True, False])
@pytest.mark.parametrize("filter_db_cluster_id", [True, False])
@pytest.mark.parametrize("user_cache_directive", [True, False])
@pytest.mark.parametrize("cache", [True, False])
@pytest.mark.parametrize("cache_hit", [True, False])
@patch(BASE_INVENTORY_PARSE)
def test_inventory_parse(
    m_parse, inventory, include_clusters, filter_db_cluster_id, user_cache_directive, cache, cache_hit
):
    """Exercise parse() across every combination of cluster inclusion, db-cluster-id
    filtering, the user's cache option, the cache flag passed to parse(), and
    whether the cache already holds a value for this run's cache key.
    """
    inventory_data = MagicMock()
    loader = MagicMock()
    path = generate_random_string(with_punctuation=False, with_digits=False)

    # Randomized option values: the test only checks they are passed through
    # unchanged, so their exact content does not matter.
    options = {}
    options["regions"] = [f"us-east-{d}" for d in range(random.randint(1, 5))]
    options["strict_permissions"] = random.choice((True, False))
    options["statuses"] = generate_random_string(with_punctuation=False)
    options["include_clusters"] = include_clusters
    options["filters"] = {
        "db-instance-id": [
            f"arn:db:{generate_random_string(with_punctuation=False)}" for i in range(random.randint(1, 10))
        ],
        "dbi-resource-id": generate_random_string(with_punctuation=False),
        "domain": generate_random_string(with_digits=False, with_punctuation=False),
        "engine": generate_random_string(with_digits=False, with_punctuation=False),
    }
    if filter_db_cluster_id:
        options["filters"]["db-cluster-id"] = [
            f"arn:cluster:{generate_random_string(with_punctuation=False)}" for i in range(random.randint(1, 10))
        ]

    options["cache"] = user_cache_directive

    def get_option_side_effect(v):
        # get_option() simply reads from the dict above.
        return options.get(v)

    inventory.get_option.side_effect = get_option_side_effect

    cache_key = path + generate_random_string()
    inventory.get_cache_key.return_value = cache_key

    # Pre-seed the cache only for the cache_hit cases.
    cache_key_value = generate_random_string()
    if cache_hit:
        inventory._cache[cache_key] = cache_key_value

    inventory._populate = MagicMock()
    inventory._populate_from_source = MagicMock()
    inventory._get_all_db_hosts = MagicMock()
    all_db_hosts = [
        {"host": f"host_{int(random.randint(1, 1000))}"},
        {"host": f"host_{int(random.randint(1, 1000))}"},
        {"host": f"host_{int(random.randint(1, 1000))}"},
        {"host": f"host_{int(random.randint(1, 1000))}"},
    ]
    inventory._get_all_db_hosts.return_value = all_db_hosts

    format_cache_key_value = f"format_inventory_{all_db_hosts}"
    inventory._format_inventory = MagicMock()
    inventory._format_inventory.return_value = format_cache_key_value

    inventory.parse(inventory_data, loader, path, cache)

    # The base-class parse() must always be delegated to first.
    m_parse.assert_called_with(inventory_data, loader, path, cache=cache)

    boto3_instance_filters = ansible_dict_to_boto3_filter_list(options["filters"])
    boto3_cluster_filters = []
    # Cluster filters are only built when clusters are actually included.
    if filter_db_cluster_id and include_clusters:
        boto3_cluster_filters = ansible_dict_to_boto3_filter_list(
            {"db-cluster-id": options["filters"]["db-cluster-id"]}
        )

    # A fresh fetch happens unless caching is fully enabled AND the key was found.
    if not cache or not user_cache_directive or (cache and user_cache_directive and not cache_hit):
        inventory._get_all_db_hosts.assert_called_with(
            options["regions"],
            boto3_instance_filters,
            boto3_cluster_filters,
            options["strict_permissions"],
            options["statuses"],
            include_clusters,
        )
        inventory._populate.assert_called_with(all_db_hosts)
        inventory._format_inventory.assert_called_with(all_db_hosts)
    else:
        inventory._get_all_db_hosts.assert_not_called()
        inventory._populate.assert_not_called()
        inventory._format_inventory.assert_not_called()

    # On a cache hit the inventory is rebuilt from the cached value instead.
    if cache and user_cache_directive and cache_hit:
        inventory._populate_from_source.assert_called_with(cache_key_value)

    # Precedence note: this reads as (cache and directive and not hit) or (not cache and directive).
    if cache and user_cache_directive and not cache_hit or (not cache and user_cache_directive):
        # validate that cache was populated
        assert inventory._cache[cache_key] == format_cache_key_value
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/lookup/test_secretsmanager_secret.py b/ansible_collections/amazon/aws/tests/unit/plugins/lookup/test_secretsmanager_secret.py
new file mode 100644
index 000000000..2c8260b61
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/lookup/test_secretsmanager_secret.py
@@ -0,0 +1,348 @@
+#
+# (c) 2024 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+import random
+from unittest.mock import ANY
+from unittest.mock import MagicMock
+from unittest.mock import call
+
+import pytest
+from botocore.exceptions import ClientError
+
+from ansible.errors import AnsibleLookupError
+
+# from ansible_collections.amazon.aws.plugins.lookup.secretsmanager_secret import AnsibleLookupError
+from ansible_collections.amazon.aws.plugins.lookup.secretsmanager_secret import LookupModule
+
+
@pytest.fixture
def lookup_plugin():
    """Return a LookupModule whose option lookup reads from a plain dict.

    Tests assign ``plugin.params = {...}`` to inject options; ``get_option``
    resolves against that dict and ``client`` is a MagicMock, so no AWS
    machinery is touched.
    """
    plugin = LookupModule()
    plugin.params = {}
    # side_effect closes over `plugin`, so reassigning plugin.params later
    # (as every test does) is picked up automatically.
    plugin.get_option = MagicMock(side_effect=lambda name: plugin.params.get(name))
    plugin.client = MagicMock()
    return plugin
+
+
def pick_from_list(elements=None):
    """Return one random element of *elements*.

    Defaults to the valid ``on_missing``/``on_denied``/``on_deleted`` option
    values ("error", "warn", "skip") when no pool is supplied.
    """
    pool = ["error", "warn", "skip"] if elements is None else elements
    return random.choice(pool)
+
+
def _raise_boto_clienterror(code, msg):
    """Build (not raise) a botocore ClientError for ``get_secret_value``.

    The returned exception carries the given error *code* and *msg* and a fixed
    request id; it is meant to be assigned as a MagicMock ``side_effect``.
    """
    error_response = {
        "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
        "Error": {"Code": code, "Message": msg},
    }
    return ClientError(error_response, "get_secret_value")
+
+
class TestLookupModuleRun:
    """Tests for LookupModule.run(): option validation, by-path lookups and
    plain term lookups (with and without secret joining)."""

    @pytest.mark.parametrize(
        "params,err",
        [
            ({"on_missing": "test"}, '"on_missing" must be a string and one of "error", "warn" or "skip", not test'),
            ({"on_denied": "return"}, '"on_denied" must be a string and one of "error", "warn" or "skip", not return'),
            (
                {"on_deleted": "delete"},
                '"on_deleted" must be a string and one of "error", "warn" or "skip", not delete',
            ),
            (
                {"on_missing": ["warn"]},
                '"on_missing" must be a string and one of "error", "warn" or "skip", not [\'warn\']',
            ),
            ({"on_denied": True}, '"on_denied" must be a string and one of "error", "warn" or "skip", not True'),
            (
                {"on_deleted": {"error": True}},
                '"on_deleted" must be a string and one of "error", "warn" or "skip", not {\'error\': True}',
            ),
        ],
    )
    def test_run_invalid_parameters(self, lookup_plugin, mocker, params, err):
        """run() must reject on_missing/on_denied/on_deleted values that are not
        exactly one of the strings "error", "warn" or "skip"."""
        # Patch out the base-class run() so only this plugin's validation executes.
        aws_lookup_base_run = mocker.patch(
            "ansible_collections.amazon.aws.plugins.lookup.secretsmanager_secret.AWSLookupBase.run"
        )
        aws_lookup_base_run.return_value = True
        m_list_secrets = mocker.patch(
            "ansible_collections.amazon.aws.plugins.lookup.secretsmanager_secret._list_secrets"
        )
        m_list_secrets.return_value = {"SecretList": []}

        lookup_plugin.params = params
        with pytest.raises(AnsibleLookupError) as exc_info:
            lookup_plugin.run(terms=["testing_secret"], variables=[])
        assert err == str(exc_info.value)

    def test_run_by_path(self, lookup_plugin, mocker):
        """With bypath=True, run() lists secrets under the term (treated as a
        path) and returns one dict mapping each secret name to its value."""
        aws_lookup_base_run = mocker.patch(
            "ansible_collections.amazon.aws.plugins.lookup.secretsmanager_secret.AWSLookupBase.run"
        )
        aws_lookup_base_run.return_value = True
        m_list_secrets = mocker.patch(
            "ansible_collections.amazon.aws.plugins.lookup.secretsmanager_secret._list_secrets"
        )
        # NOTE(review): _list_secrets is mocked as a list of pages here but as a
        # single dict in test_run_invalid_parameters — presumably it is a
        # paginator; confirm against the plugin implementation.
        secrets_lists = [{"Name": "secret-0"}, {"Name": "secret-1"}, {"Name": "secret-2"}]
        m_list_secrets.return_value = [{"SecretList": secrets_lists}]

        params = {
            "on_missing": pick_from_list(),
            "on_denied": pick_from_list(),
            "on_deleted": pick_from_list(),
            "bypath": True,
        }
        lookup_plugin.params = params

        # get_secret_value() is stubbed to resolve names from this mapping.
        lookup_plugin.get_secret_value = MagicMock()
        secrets_values = {
            "secret-0": "value-0",
            "secret-1": "value-1",
            "secret-2": "value-2",
        }
        lookup_plugin.get_secret_value.side_effect = lambda x, client, **kwargs: secrets_values.get(x)

        secretsmanager_client = MagicMock()
        lookup_plugin.client.return_value = secretsmanager_client

        term = "term0"
        assert [secrets_values] == lookup_plugin.run(terms=[term], variables=[])

        m_list_secrets.assert_called_once_with(secretsmanager_client, term)
        lookup_plugin.client.assert_called_once_with("secretsmanager", ANY)
        # Each listed secret must be fetched with the configured on_missing/on_denied.
        lookup_plugin.get_secret_value.assert_has_calls(
            [
                call(
                    "secret-0",
                    secretsmanager_client,
                    on_missing=params.get("on_missing"),
                    on_denied=params.get("on_denied"),
                ),
                call(
                    "secret-1",
                    secretsmanager_client,
                    on_missing=params.get("on_missing"),
                    on_denied=params.get("on_denied"),
                ),
                call(
                    "secret-2",
                    secretsmanager_client,
                    on_missing=params.get("on_missing"),
                    on_denied=params.get("on_denied"),
                ),
            ]
        )

    @pytest.mark.parametrize("join_secrets", [True, False])
    @pytest.mark.parametrize(
        "terms", [["secret-0"], ["secret-0", "secret-1"], ["secret-0", "secret-1", "secret-0", "secret-2"]]
    )
    def test_run(self, lookup_plugin, mocker, join_secrets, terms):
        """With bypath=False, run() fetches each term; unresolved terms
        (get_secret_value returning None) are dropped, and join=True
        concatenates all resolved values into a single string."""
        aws_lookup_base_run = mocker.patch(
            "ansible_collections.amazon.aws.plugins.lookup.secretsmanager_secret.AWSLookupBase.run"
        )
        aws_lookup_base_run.return_value = True

        params = {
            "on_missing": pick_from_list(),
            "on_denied": pick_from_list(),
            "on_deleted": pick_from_list(),
            "bypath": False,
            "version_stage": MagicMock(),
            "version_id": MagicMock(),
            "nested": pick_from_list([True, False]),
            "join": join_secrets,
        }
        lookup_plugin.params = params

        lookup_plugin.get_secret_value = MagicMock()
        # "secret-2" is intentionally absent so some terms resolve to None.
        secrets_values = {
            "secret-0": "value-0",
            "secret-1": "value-1",
        }
        lookup_plugin.get_secret_value.side_effect = lambda x, client, **kwargs: secrets_values.get(x)

        secretsmanager_client = MagicMock()
        lookup_plugin.client.return_value = secretsmanager_client

        expected_secrets = [secrets_values.get(x) for x in terms if secrets_values.get(x) is not None]
        if join_secrets:
            expected_secrets = ["".join(expected_secrets)]

        assert expected_secrets == lookup_plugin.run(terms=terms, variables=[])

        lookup_plugin.client.assert_called_once_with("secretsmanager", ANY)
        # Every term must be forwarded with the full set of configured options.
        lookup_plugin.get_secret_value.assert_has_calls(
            [
                call(
                    x,
                    secretsmanager_client,
                    version_stage=params.get("version_stage"),
                    version_id=params.get("version_id"),
                    on_missing=params.get("on_missing"),
                    on_denied=params.get("on_denied"),
                    on_deleted=params.get("on_deleted"),
                    nested=params.get("nested"),
                )
                for x in terms
            ]
        )
+
+
class TestLookupModuleGetSecretValue:
    """Tests for LookupModule.get_secret_value(): nested-key handling, binary
    secrets, and the on_missing/on_denied/on_deleted error policies."""

    def test_get_secret__invalid_nested_value(self, lookup_plugin):
        """A nested query whose term contains no '.' separator must raise."""
        params = {
            "version_stage": MagicMock(),
            "version_id": MagicMock(),
            "on_missing": None,
            "on_denied": None,
            "on_deleted": None,
        }
        with pytest.raises(AnsibleLookupError) as exc_info:
            client = MagicMock()
            lookup_plugin.get_secret_value("aws_invalid_nested_secret", client, nested=True, **params)
        assert "Nested query must use the following syntax: `aws_secret_name.<key_name>.<key_name>" == str(
            exc_info.value
        )

    @pytest.mark.parametrize("versionId", [None, MagicMock()])
    @pytest.mark.parametrize("versionStage", [None, MagicMock()])
    @pytest.mark.parametrize(
        "term,nested,secretId",
        [
            # nested=True splits the term on the first '.'; the leading segment
            # becomes the SecretId passed to the API.
            ("secret0", False, "secret0"),
            ("secret0.child", False, "secret0.child"),
            ("secret0.child", True, "secret0"),
            ("secret0.root.child", False, "secret0.root.child"),
            ("secret0.root.child", True, "secret0"),
        ],
    )
    def test_get_secret__binary_secret(self, lookup_plugin, versionId, versionStage, term, nested, secretId):
        """A SecretBinary payload is returned as-is, and VersionId/VersionStage
        are only included in the API call when set."""
        params = {
            "version_stage": versionStage,
            "version_id": versionId,
            "on_missing": None,
            "on_denied": None,
            "on_deleted": None,
        }

        client = MagicMock()
        client.get_secret_value = MagicMock()
        bin_secret_value = b"binary_value"
        client.get_secret_value.return_value = {"SecretBinary": bin_secret_value}

        assert bin_secret_value == lookup_plugin.get_secret_value(term, client, nested=nested, **params)
        # Rebuild the expected API call: optional params must be omitted, not None.
        api_params = {"SecretId": secretId}
        if versionId is not None:
            api_params["VersionId"] = versionId
        if versionStage:
            api_params["VersionStage"] = versionStage
        client.get_secret_value.assert_called_once_with(aws_retry=True, **api_params)

    @pytest.mark.parametrize("on_missing", ["warn", "error"])
    @pytest.mark.parametrize(
        "term,missing_key",
        [
            # The reported missing key is the nested path portion of the term.
            ("secret_name.root.child1", "root.child1"),
            ("secret_name.root.child1.nested", "root.child1"),
            ("secret_name.root.child.nested1", "root.child.nested1"),
            ("secret_name.root.child.nested.value", "root.child.nested.value"),
        ],
    )
    def test_get_secret__missing_nested_secret(self, lookup_plugin, on_missing, term, missing_key):
        """A nested key absent from the secret's JSON either raises (error) or
        warns and returns None (warn), naming the missing key path."""
        client = MagicMock()
        client.get_secret_value = MagicMock()
        json_secret = '{"root": {"child": {"nested": "ansible-test-secret-0"}}}'
        client.get_secret_value.return_value = {"SecretString": json_secret}

        if on_missing == "error":
            with pytest.raises(AnsibleLookupError) as exc_info:
                lookup_plugin.get_secret_value(term, client, nested=True, on_missing=on_missing)
            assert f"Successfully retrieved secret but there exists no key {missing_key} in the secret" == str(
                exc_info.value
            )
        else:
            lookup_plugin._display = MagicMock()
            lookup_plugin._display.warning = MagicMock()
            assert lookup_plugin.get_secret_value(term, client, nested=True, on_missing=on_missing) is None
            lookup_plugin._display.warning.assert_called_once_with(
                f"Skipping, Successfully retrieved secret but there exists no key {missing_key} in the secret"
            )

    def test_get_secret__missing_secret(self, lookup_plugin):
        """An unrecognised ClientError code always raises, wrapping the boto message."""
        client = MagicMock()
        client.get_secret_value = MagicMock()
        client.get_secret_value.side_effect = _raise_boto_clienterror("UnexpecteError", "unable to retrieve Secret")

        with pytest.raises(AnsibleLookupError) as exc_info:
            lookup_plugin.get_secret_value(MagicMock(), client)
        assert (
            "Failed to retrieve secret: An error occurred (UnexpecteError) when calling the get_secret_value operation: unable to retrieve Secret"
            == str(exc_info.value)
        )

    @pytest.mark.parametrize("on_denied", ["warn", "error"])
    def test_get_secret__on_denied(self, lookup_plugin, on_denied):
        """AccessDeniedException either raises (error) or warns and returns None (warn)."""
        client = MagicMock()
        client.get_secret_value = MagicMock()
        client.get_secret_value.side_effect = _raise_boto_clienterror(
            "AccessDeniedException", "Access denied to Secret"
        )
        term = "ansible-test-secret-0123"

        if on_denied == "error":
            with pytest.raises(AnsibleLookupError) as exc_info:
                lookup_plugin.get_secret_value(term, client, on_denied=on_denied)
            assert f"Failed to access secret {term} (AccessDenied)" == str(exc_info.value)
        else:
            lookup_plugin._display = MagicMock()
            lookup_plugin._display.warning = MagicMock()
            assert lookup_plugin.get_secret_value(term, client, on_denied=on_denied) is None
            lookup_plugin._display.warning.assert_called_once_with(f"Skipping, access denied for secret {term}")

    @pytest.mark.parametrize("on_missing", ["warn", "error"])
    def test_get_secret__on_missing(self, lookup_plugin, on_missing):
        """ResourceNotFoundException either raises (error) or warns and returns None (warn)."""
        client = MagicMock()
        client.get_secret_value = MagicMock()
        client.get_secret_value.side_effect = _raise_boto_clienterror("ResourceNotFoundException", "secret not found")
        term = "ansible-test-secret-4561"

        if on_missing == "error":
            with pytest.raises(AnsibleLookupError) as exc_info:
                lookup_plugin.get_secret_value(term, client, on_missing=on_missing)
            assert f"Failed to find secret {term} (ResourceNotFound)" == str(exc_info.value)
        else:
            lookup_plugin._display = MagicMock()
            lookup_plugin._display.warning = MagicMock()
            assert lookup_plugin.get_secret_value(term, client, on_missing=on_missing) is None
            lookup_plugin._display.warning.assert_called_once_with(f"Skipping, did not find secret {term}")

    @pytest.mark.parametrize("on_deleted", ["warn", "error"])
    def test_get_secret__on_deleted(self, lookup_plugin, on_deleted):
        """A secret marked for deletion either raises (error) or warns and returns None (warn)."""
        client = MagicMock()
        client.get_secret_value = MagicMock()
        client.get_secret_value.side_effect = _raise_boto_clienterror(
            "ResourceMarkedForDeletion", "marked for deletion"
        )
        term = "ansible-test-secret-8790"

        if on_deleted == "error":
            with pytest.raises(AnsibleLookupError) as exc_info:
                lookup_plugin.get_secret_value(term, client, on_deleted=on_deleted)
            assert f"Failed to find secret {term} (marked for deletion)" == str(exc_info.value)
        else:
            lookup_plugin._display = MagicMock()
            lookup_plugin._display.warning = MagicMock()
            assert lookup_plugin.get_secret_value(term, client, on_deleted=on_deleted) is None
            lookup_plugin._display.warning.assert_called_once_with(
                f"Skipping, did not find secret (marked for deletion) {term}"
            )
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/__init__.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/conftest.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/conftest.py
index a7d1e0475..7a870163c 100644
--- a/ansible_collections/amazon/aws/tests/unit/plugins/modules/conftest.py
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/conftest.py
@@ -1,16 +1,13 @@
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import json
import pytest
-from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes
from ansible.module_utils.common._collections_compat import MutableMapping
+from ansible.module_utils.six import string_types
@pytest.fixture
@@ -18,14 +15,14 @@ def patch_ansible_module(request, mocker):
if isinstance(request.param, string_types):
args = request.param
elif isinstance(request.param, MutableMapping):
- if 'ANSIBLE_MODULE_ARGS' not in request.param:
- request.param = {'ANSIBLE_MODULE_ARGS': request.param}
- if '_ansible_remote_tmp' not in request.param['ANSIBLE_MODULE_ARGS']:
- request.param['ANSIBLE_MODULE_ARGS']['_ansible_remote_tmp'] = '/tmp'
- if '_ansible_keep_remote_files' not in request.param['ANSIBLE_MODULE_ARGS']:
- request.param['ANSIBLE_MODULE_ARGS']['_ansible_keep_remote_files'] = False
+ if "ANSIBLE_MODULE_ARGS" not in request.param:
+ request.param = {"ANSIBLE_MODULE_ARGS": request.param}
+ if "_ansible_remote_tmp" not in request.param["ANSIBLE_MODULE_ARGS"]:
+ request.param["ANSIBLE_MODULE_ARGS"]["_ansible_remote_tmp"] = "/tmp"
+ if "_ansible_keep_remote_files" not in request.param["ANSIBLE_MODULE_ARGS"]:
+ request.param["ANSIBLE_MODULE_ARGS"]["_ansible_keep_remote_files"] = False
args = json.dumps(request.param)
else:
- raise Exception('Malformed data to the patch_ansible_module pytest fixture')
+ raise Exception("Malformed data to the patch_ansible_module pytest fixture")
- mocker.patch('ansible.module_utils.basic._ANSIBLE_ARGS', to_bytes(args))
+ mocker.patch("ansible.module_utils.basic._ANSIBLE_ARGS", to_bytes(args))
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_eip/__init__.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_eip/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_eip/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_eip/test_check_is_instance.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_eip/test_check_is_instance.py
new file mode 100644
index 000000000..0afeab56a
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_eip/test_check_is_instance.py
@@ -0,0 +1,65 @@
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+import pytest
+
+from ansible_collections.amazon.aws.plugins.modules import ec2_eip
+
# (device, in_vpc, expected) triples for test_check_is_instance below.
# Only identifiers that look like EC2 instance ids ("i-...") are expected to
# count as instances; None, empty strings, ENI ids and junk are not.
# ("eni-...", in_vpc=False) is deliberately absent — that combination raises
# and is covered by test_check_is_instance_needs_in_vpc.
EXAMPLE_DATA = [
    (None, True, False),
    (None, False, False),
    ("", True, False),
    ("", False, False),
    ("i-123456789", True, True),
    ("i-123456789", False, True),
    ("eni-123456789", True, False),
    ("junk", True, False),
    ("junk", False, False),
]
+
+
def test_check_is_instance_needs_in_vpc():
    """An ENI device id with in_vpc=False must raise EipError rather than
    return False — ENIs only make sense inside a VPC."""
    with pytest.raises(ec2_eip.EipError):
        ec2_eip.check_is_instance("eni-123456789", False)
+
+
@pytest.mark.parametrize("device,in_vpc,expected", EXAMPLE_DATA)
def test_check_is_instance(device, in_vpc, expected):
    """check_is_instance() classifies each EXAMPLE_DATA device id correctly."""
    result = ec2_eip.check_is_instance(device, in_vpc)
    # `is` (not ==) asserts the function returns a genuine bool.
    assert result is expected
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_instance/__init__.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_instance/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_instance/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_instance/test_build_run_instance_spec.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_instance/test_build_run_instance_spec.py
index e889b676a..a64c16961 100644
--- a/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_instance/test_build_run_instance_spec.py
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_instance/test_build_run_instance_spec.py
@@ -3,23 +3,21 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from unittest.mock import sentinel
import pytest
-from ansible_collections.amazon.aws.tests.unit.compat.mock import sentinel
import ansible_collections.amazon.aws.plugins.modules.ec2_instance as ec2_instance_module
@pytest.fixture
def params_object():
params = {
- 'iam_instance_profile': None,
- 'exact_count': None,
- 'count': None,
- 'launch_template': None,
- 'instance_type': None,
+ "iam_instance_profile": None,
+ "exact_count": None,
+ "count": None,
+ "launch_template": None,
+ "instance_type": sentinel.INSTANCE_TYPE,
}
return params
@@ -29,11 +27,13 @@ def ec2_instance(monkeypatch):
# monkey patches various ec2_instance module functions, we'll separately test the operation of
# these functions, we just care that it's passing the results into the right place in the
# instance spec.
- monkeypatch.setattr(ec2_instance_module, 'build_top_level_options', lambda params: {'TOP_LEVEL_OPTIONS': sentinel.TOP_LEVEL})
- monkeypatch.setattr(ec2_instance_module, 'build_network_spec', lambda params: sentinel.NETWORK_SPEC)
- monkeypatch.setattr(ec2_instance_module, 'build_volume_spec', lambda params: sentinel.VOlUME_SPEC)
- monkeypatch.setattr(ec2_instance_module, 'build_instance_tags', lambda params: sentinel.TAG_SPEC)
- monkeypatch.setattr(ec2_instance_module, 'determine_iam_role', lambda params: sentinel.IAM_PROFILE_ARN)
+ monkeypatch.setattr(
+ ec2_instance_module, "build_top_level_options", lambda params: {"TOP_LEVEL_OPTIONS": sentinel.TOP_LEVEL}
+ )
+ monkeypatch.setattr(ec2_instance_module, "build_network_spec", lambda params: sentinel.NETWORK_SPEC)
+ monkeypatch.setattr(ec2_instance_module, "build_volume_spec", lambda params: sentinel.VOlUME_SPEC)
+ monkeypatch.setattr(ec2_instance_module, "build_instance_tags", lambda params: sentinel.TAG_SPEC)
+ monkeypatch.setattr(ec2_instance_module, "determine_iam_role", lambda params: sentinel.IAM_PROFILE_ARN)
return ec2_instance_module
@@ -43,33 +43,37 @@ def _assert_defaults(instance_spec, to_skip=None):
assert isinstance(instance_spec, dict)
- if 'TagSpecifications' not in to_skip:
- assert 'TagSpecifications' in instance_spec
- assert instance_spec['TagSpecifications'] is sentinel.TAG_SPEC
+ if "TagSpecifications" not in to_skip:
+ assert "TagSpecifications" in instance_spec
+ assert instance_spec["TagSpecifications"] is sentinel.TAG_SPEC
- if 'NetworkInterfaces' not in to_skip:
- assert 'NetworkInterfaces' in instance_spec
- assert instance_spec['NetworkInterfaces'] is sentinel.NETWORK_SPEC
+ if "NetworkInterfaces" not in to_skip:
+ assert "NetworkInterfaces" in instance_spec
+ assert instance_spec["NetworkInterfaces"] is sentinel.NETWORK_SPEC
- if 'BlockDeviceMappings' not in to_skip:
- assert 'BlockDeviceMappings' in instance_spec
- assert instance_spec['BlockDeviceMappings'] is sentinel.VOlUME_SPEC
+ if "BlockDeviceMappings" not in to_skip:
+ assert "BlockDeviceMappings" in instance_spec
+ assert instance_spec["BlockDeviceMappings"] is sentinel.VOlUME_SPEC
- if 'IamInstanceProfile' not in to_skip:
+ if "IamInstanceProfile" not in to_skip:
# By default, this shouldn't be returned
- assert 'IamInstanceProfile' not in instance_spec
+ assert "IamInstanceProfile" not in instance_spec
- if 'MinCount' not in to_skip:
- assert 'MinCount' in instance_spec
- instance_spec['MinCount'] == 1
+ if "MinCount" not in to_skip:
+ assert "MinCount" in instance_spec
+ instance_spec["MinCount"] == 1
- if 'MaxCount' not in to_skip:
- assert 'MaxCount' in instance_spec
- instance_spec['MaxCount'] == 1
+ if "MaxCount" not in to_skip:
+ assert "MaxCount" in instance_spec
+ instance_spec["MaxCount"] == 1
- if 'TOP_LEVEL_OPTIONS' not in to_skip:
- assert 'TOP_LEVEL_OPTIONS' in instance_spec
- assert instance_spec['TOP_LEVEL_OPTIONS'] is sentinel.TOP_LEVEL
+ if "TOP_LEVEL_OPTIONS" not in to_skip:
+ assert "TOP_LEVEL_OPTIONS" in instance_spec
+ assert instance_spec["TOP_LEVEL_OPTIONS"] is sentinel.TOP_LEVEL
+
+ if "InstanceType" not in to_skip:
+ assert "InstanceType" in instance_spec
+ instance_spec["InstanceType"] == sentinel.INSTANCE_TYPE
def test_build_run_instance_spec_defaults(params_object, ec2_instance):
@@ -77,50 +81,72 @@ def test_build_run_instance_spec_defaults(params_object, ec2_instance):
_assert_defaults(instance_spec)
def test_build_run_instance_spec_type_required(params_object, ec2_instance):
    """build_run_instance_spec() needs either instance_type or launch_template;
    with neither it raises, with only a launch_template it omits InstanceType."""
    params_object["instance_type"] = None
    params_object["launch_template"] = None
    # Test that we throw an Ec2InstanceAWSError if passed neither
    with pytest.raises(ec2_instance.Ec2InstanceAWSError):
        instance_spec = ec2_instance.build_run_instance_spec(params_object)

    # Test that instance_type can be None if launch_template is set
    params_object["launch_template"] = sentinel.LAUNCH_TEMPLATE
    instance_spec = ec2_instance.build_run_instance_spec(params_object)
    _assert_defaults(instance_spec, ["InstanceType"])
    assert "InstanceType" not in instance_spec
+
+
def test_build_run_instance_spec_tagging(params_object, ec2_instance, monkeypatch):
# build_instance_tags can return None, RunInstance doesn't like this
- monkeypatch.setattr(ec2_instance_module, 'build_instance_tags', lambda params: None)
+ monkeypatch.setattr(ec2_instance_module, "build_instance_tags", lambda params: None)
instance_spec = ec2_instance.build_run_instance_spec(params_object)
- _assert_defaults(instance_spec, ['TagSpecifications'])
- assert 'TagSpecifications' not in instance_spec
+ _assert_defaults(instance_spec, ["TagSpecifications"])
+ assert "TagSpecifications" not in instance_spec
# if someone *explicitly* passes {} (rather than not setting it), then [] can be returned
- monkeypatch.setattr(ec2_instance_module, 'build_instance_tags', lambda params: [])
+ monkeypatch.setattr(ec2_instance_module, "build_instance_tags", lambda params: [])
instance_spec = ec2_instance.build_run_instance_spec(params_object)
- _assert_defaults(instance_spec, ['TagSpecifications'])
- assert 'TagSpecifications' in instance_spec
- assert instance_spec['TagSpecifications'] == []
+ _assert_defaults(instance_spec, ["TagSpecifications"])
+ assert "TagSpecifications" in instance_spec
+ assert instance_spec["TagSpecifications"] == []
def test_build_run_instance_spec_instance_profile(params_object, ec2_instance):
- params_object['iam_instance_profile'] = sentinel.INSTANCE_PROFILE_NAME
+ params_object["iam_instance_profile"] = sentinel.INSTANCE_PROFILE_NAME
instance_spec = ec2_instance.build_run_instance_spec(params_object)
- _assert_defaults(instance_spec, ['IamInstanceProfile'])
- assert 'IamInstanceProfile' in instance_spec
- assert instance_spec['IamInstanceProfile'] == {'Arn': sentinel.IAM_PROFILE_ARN}
+ _assert_defaults(instance_spec, ["IamInstanceProfile"])
+ assert "IamInstanceProfile" in instance_spec
+ assert instance_spec["IamInstanceProfile"] == {"Arn": sentinel.IAM_PROFILE_ARN}
def test_build_run_instance_spec_count(params_object, ec2_instance):
# When someone passes 'count', that number of instances will be *launched*
- params_object['count'] = sentinel.COUNT
+ params_object["count"] = sentinel.COUNT
instance_spec = ec2_instance.build_run_instance_spec(params_object)
- _assert_defaults(instance_spec, ['MaxCount', 'MinCount'])
- assert 'MaxCount' in instance_spec
- assert 'MinCount' in instance_spec
- assert instance_spec['MaxCount'] == sentinel.COUNT
- assert instance_spec['MinCount'] == sentinel.COUNT
+ _assert_defaults(instance_spec, ["MaxCount", "MinCount"])
+ assert "MaxCount" in instance_spec
+ assert "MinCount" in instance_spec
+ assert instance_spec["MaxCount"] == sentinel.COUNT
+ assert instance_spec["MinCount"] == sentinel.COUNT
def test_build_run_instance_spec_exact_count(params_object, ec2_instance):
# The "exact_count" logic relies on enforce_count doing the math to figure out how many
# instances to start/stop. The enforce_count call is responsible for ensuring that 'to_launch'
# is set and is a positive integer.
- params_object['exact_count'] = sentinel.EXACT_COUNT
- params_object['to_launch'] = sentinel.TO_LAUNCH
+ params_object["exact_count"] = 42
+ params_object["to_launch"] = sentinel.TO_LAUNCH
instance_spec = ec2_instance.build_run_instance_spec(params_object)
- _assert_defaults(instance_spec, ['MaxCount', 'MinCount'])
- assert 'MaxCount' in instance_spec
- assert 'MinCount' in instance_spec
- assert instance_spec['MaxCount'] == sentinel.TO_LAUNCH
- assert instance_spec['MinCount'] == sentinel.TO_LAUNCH
+ _assert_defaults(instance_spec, ["MaxCount", "MinCount"])
+ assert "MaxCount" in instance_spec
+ assert "MinCount" in instance_spec
+ assert instance_spec["MaxCount"] == 42
+ assert instance_spec["MinCount"] == 42
+
+ instance_spec = ec2_instance.build_run_instance_spec(params_object, 7)
+
+ _assert_defaults(instance_spec, ["MaxCount", "MinCount"])
+ assert "MaxCount" in instance_spec
+ assert "MinCount" in instance_spec
+ assert instance_spec["MaxCount"] == 35
+ assert instance_spec["MinCount"] == 35
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_instance/test_determine_iam_role.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_instance/test_determine_iam_role.py
index cdde74c97..7645d5559 100644
--- a/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_instance/test_determine_iam_role.py
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_instance/test_determine_iam_role.py
@@ -3,16 +3,14 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+import sys
+from unittest.mock import MagicMock
+from unittest.mock import sentinel
import pytest
-import sys
-from ansible_collections.amazon.aws.tests.unit.compat.mock import MagicMock
-from ansible_collections.amazon.aws.tests.unit.compat.mock import sentinel
-import ansible_collections.amazon.aws.plugins.modules.ec2_instance as ec2_instance_module
import ansible_collections.amazon.aws.plugins.module_utils.arn as utils_arn
+import ansible_collections.amazon.aws.plugins.modules.ec2_instance as ec2_instance_module
from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
try:
@@ -20,24 +18,29 @@ try:
except ImportError:
pass
-pytest.mark.skipif(not HAS_BOTO3, reason="test_determine_iam_role.py requires the python modules 'boto3' and 'botocore'")
+pytestmark = pytest.mark.skipif(
+ not HAS_BOTO3, reason="test_determine_iam_role.py requires the python modules 'boto3' and 'botocore'"
+)
-def _client_error(code='GenericError'):
+def _client_error(code="GenericError"):
return botocore.exceptions.ClientError(
- {'Error': {'Code': code, 'Message': 'Something went wrong'},
- 'ResponseMetadata': {'RequestId': '01234567-89ab-cdef-0123-456789abcdef'}},
- 'some_called_method')
+ {
+ "Error": {"Code": code, "Message": "Something went wrong"},
+ "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
+ },
+ "some_called_method",
+ )
@pytest.fixture
def params_object():
params = {
- 'instance_role': None,
- 'exact_count': None,
- 'count': None,
- 'launch_template': None,
- 'instance_type': None,
+ "instance_role": None,
+ "exact_count": None,
+ "count": None,
+ "launch_template": None,
+ "instance_type": None,
}
return params
@@ -49,8 +52,8 @@ class FailJsonException(Exception):
@pytest.fixture
def ec2_instance(monkeypatch):
- monkeypatch.setattr(ec2_instance_module, 'parse_aws_arn', lambda arn: None)
- monkeypatch.setattr(ec2_instance_module, 'module', MagicMock())
+ monkeypatch.setattr(ec2_instance_module, "validate_aws_arn", lambda arn, service, resource_type: None)
+ monkeypatch.setattr(ec2_instance_module, "module", MagicMock())
ec2_instance_module.module.fail_json.side_effect = FailJsonException()
ec2_instance_module.module.fail_json_aws.side_effect = FailJsonException()
return ec2_instance_module
@@ -58,15 +61,15 @@ def ec2_instance(monkeypatch):
def test_determine_iam_role_arn(params_object, ec2_instance, monkeypatch):
# Revert the default monkey patch to make it simple to try passing a valid ARNs
- monkeypatch.setattr(ec2_instance, 'parse_aws_arn', utils_arn.parse_aws_arn)
+ monkeypatch.setattr(ec2_instance, "validate_aws_arn", utils_arn.validate_aws_arn)
# Simplest example, someone passes a valid instance profile ARN
- arn = ec2_instance.determine_iam_role('arn:aws:iam::123456789012:instance-profile/myprofile')
- assert arn == 'arn:aws:iam::123456789012:instance-profile/myprofile'
+ arn = ec2_instance.determine_iam_role("arn:aws:iam::123456789012:instance-profile/myprofile")
+ assert arn == "arn:aws:iam::123456789012:instance-profile/myprofile"
def test_determine_iam_role_name(params_object, ec2_instance):
- profile_description = {'InstanceProfile': {'Arn': sentinel.IAM_PROFILE_ARN}}
+ profile_description = {"InstanceProfile": {"Arn": sentinel.IAM_PROFILE_ARN}}
iam_client = MagicMock(**{"get_instance_profile.return_value": profile_description})
ec2_instance_module.module.client.return_value = iam_client
@@ -75,28 +78,28 @@ def test_determine_iam_role_name(params_object, ec2_instance):
def test_determine_iam_role_missing(params_object, ec2_instance):
- missing_exception = _client_error('NoSuchEntity')
+ missing_exception = _client_error("NoSuchEntity")
iam_client = MagicMock(**{"get_instance_profile.side_effect": missing_exception})
ec2_instance_module.module.client.return_value = iam_client
- with pytest.raises(FailJsonException) as exception:
- arn = ec2_instance.determine_iam_role(sentinel.IAM_PROFILE_NAME)
+ with pytest.raises(FailJsonException):
+ ec2_instance.determine_iam_role(sentinel.IAM_PROFILE_NAME)
assert ec2_instance_module.module.fail_json_aws.call_count == 1
assert ec2_instance_module.module.fail_json_aws.call_args.args[0] is missing_exception
- assert 'Could not find' in ec2_instance_module.module.fail_json_aws.call_args.kwargs['msg']
+ assert "Could not find" in ec2_instance_module.module.fail_json_aws.call_args.kwargs["msg"]
-@pytest.mark.skipif(sys.version_info < (3, 8), reason='call_args behaviour changed in Python 3.8')
+@pytest.mark.skipif(sys.version_info < (3, 8), reason="call_args behaviour changed in Python 3.8")
def test_determine_iam_role_missing(params_object, ec2_instance):
missing_exception = _client_error()
iam_client = MagicMock(**{"get_instance_profile.side_effect": missing_exception})
ec2_instance_module.module.client.return_value = iam_client
- with pytest.raises(FailJsonException) as exception:
- arn = ec2_instance.determine_iam_role(sentinel.IAM_PROFILE_NAME)
+ with pytest.raises(FailJsonException):
+ ec2_instance.determine_iam_role(sentinel.IAM_PROFILE_NAME)
assert ec2_instance_module.module.fail_json_aws.call_count == 1
assert ec2_instance_module.module.fail_json_aws.call_args.args[0] is missing_exception
- assert 'An error occurred while searching' in ec2_instance_module.module.fail_json_aws.call_args.kwargs['msg']
- assert 'Please try supplying the full ARN' in ec2_instance_module.module.fail_json_aws.call_args.kwargs['msg']
+ assert "An error occurred while searching" in ec2_instance_module.module.fail_json_aws.call_args.kwargs["msg"]
+ assert "Please try supplying the full ARN" in ec2_instance_module.module.fail_json_aws.call_args.kwargs["msg"]
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/__init__.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_expand_rules.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_expand_rules.py
new file mode 100644
index 000000000..1abfd526c
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_expand_rules.py
@@ -0,0 +1,240 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+import sys
+from unittest.mock import sentinel
+
+import pytest
+
+import ansible_collections.amazon.aws.plugins.modules.ec2_security_group as ec2_security_group_module
+
+PORT_EXPANSION = [
+ ({"from_port": 83}, ({"from_port": 83, "to_port": None},)),
+ ({"to_port": 36}, ({"from_port": None, "to_port": 36},)),
+ ({"icmp_type": 90}, ({"from_port": 90, "to_port": None},)),
+ ({"icmp_type": 74, "icmp_code": 66}, ({"from_port": 74, "to_port": 66},)),
+ # Note: ports is explicitly a list of strings because we support "<port a>-<port b>"
+ ({"ports": ["1"]}, ({"from_port": 1, "to_port": 1},)),
+ ({"ports": ["41-85"]}, ({"from_port": 41, "to_port": 85},)),
+ (
+ {"ports": ["63", "74"]},
+ (
+ {"from_port": 63, "to_port": 63},
+ {"from_port": 74, "to_port": 74},
+ ),
+ ),
+ (
+ {"ports": ["97-30", "41-80"]},
+ (
+ {"from_port": 30, "to_port": 97},
+ {"from_port": 41, "to_port": 80},
+ ),
+ ),
+ (
+ {"ports": ["95", "67-79"]},
+ (
+ {"from_port": 95, "to_port": 95},
+ {"from_port": 67, "to_port": 79},
+ ),
+ ),
+ # There are legitimate cases with no port info
+ ({}, ({},)),
+]
+PORTS_EXPANSION = [
+ (["28"], [(28, 28)]),
+ (["80-83"], [(80, 83)]),
+ # We tolerate the order being backwards
+ (["83-80"], [(80, 83)]),
+ (["41", "1"], [(41, 41), (1, 1)]),
+ (["70", "39-0"], [(70, 70), (0, 39)]),
+ (["57-6", "31"], [(6, 57), (31, 31)]),
+ # https://github.com/ansible-collections/amazon.aws/pull/1241
+ (["-1"], [(-1, -1)]),
+]
+SOURCE_EXPANSION = [
+ (
+ {"cidr_ip": ["192.0.2.0/24"]},
+ ({"cidr_ip": "192.0.2.0/24"},),
+ ),
+ (
+ {"cidr_ipv6": ["2001:db8::/32"]},
+ ({"cidr_ipv6": "2001:db8::/32"},),
+ ),
+ (
+ {"group_id": ["sg-123456789"]},
+ ({"group_id": "sg-123456789"},),
+ ),
+ (
+ {"group_name": ["MyExampleGroupName"]},
+ ({"group_name": "MyExampleGroupName"},),
+ ),
+ (
+ {"ip_prefix": ["pl-123456abcde123456"]},
+ ({"ip_prefix": "pl-123456abcde123456"},),
+ ),
+ (
+ {"cidr_ip": ["192.0.2.0/24", "198.51.100.0/24"]},
+ (
+ {"cidr_ip": "192.0.2.0/24"},
+ {"cidr_ip": "198.51.100.0/24"},
+ ),
+ ),
+ (
+ {"cidr_ipv6": ["2001:db8::/32", "100::/64"]},
+ (
+ {"cidr_ipv6": "2001:db8::/32"},
+ {"cidr_ipv6": "100::/64"},
+ ),
+ ),
+ (
+ {"group_id": ["sg-123456789", "sg-abcdef1234"]},
+ (
+ {"group_id": "sg-123456789"},
+ {"group_id": "sg-abcdef1234"},
+ ),
+ ),
+ (
+ {"group_name": ["MyExampleGroupName", "AnotherExample"]},
+ (
+ {"group_name": "MyExampleGroupName"},
+ {"group_name": "AnotherExample"},
+ ),
+ ),
+ (
+ {"ip_prefix": ["pl-123456abcde123456", "pl-abcdef12345abcdef"]},
+ ({"ip_prefix": "pl-123456abcde123456"}, {"ip_prefix": "pl-abcdef12345abcdef"}),
+ ),
+ (
+ {
+ "cidr_ip": ["192.0.2.0/24"],
+ "cidr_ipv6": ["2001:db8::/32"],
+ "group_id": ["sg-123456789"],
+ "group_name": ["MyExampleGroupName"],
+ "ip_prefix": ["pl-123456abcde123456"],
+ },
+ (
+ {"cidr_ip": "192.0.2.0/24"},
+ {"cidr_ipv6": "2001:db8::/32"},
+ {"group_id": "sg-123456789"},
+ {"group_name": "MyExampleGroupName"},
+ {"ip_prefix": "pl-123456abcde123456"},
+ ),
+ ),
+ (
+ {
+ "cidr_ip": ["192.0.2.0/24", "198.51.100.0/24"],
+ "cidr_ipv6": ["2001:db8::/32", "100::/64"],
+ "group_id": ["sg-123456789", "sg-abcdef1234"],
+ "group_name": ["MyExampleGroupName", "AnotherExample"],
+ "ip_prefix": ["pl-123456abcde123456", "pl-abcdef12345abcdef"],
+ },
+ (
+ {"cidr_ip": "192.0.2.0/24"},
+ {"cidr_ip": "198.51.100.0/24"},
+ {"cidr_ipv6": "2001:db8::/32"},
+ {"cidr_ipv6": "100::/64"},
+ {"group_id": "sg-123456789"},
+ {"group_id": "sg-abcdef1234"},
+ {"group_name": "MyExampleGroupName"},
+ {"group_name": "AnotherExample"},
+ {"ip_prefix": "pl-123456abcde123456"},
+ {"ip_prefix": "pl-abcdef12345abcdef"},
+ ),
+ ),
+]
+
+RULE_EXPANSION = [
+ (
+ {"ports": ["24"], "cidr_ip": ["192.0.2.0/24"], "sentinel": sentinel.RULE_VALUE},
+ [
+ {"from_port": 24, "to_port": 24, "cidr_ip": "192.0.2.0/24", "sentinel": sentinel.RULE_VALUE},
+ ],
+ ),
+ (
+ {"ports": ["24", "50"], "cidr_ip": ["192.0.2.0/24", "198.51.100.0/24"], "sentinel": sentinel.RULE_VALUE},
+ [
+ {"from_port": 24, "to_port": 24, "cidr_ip": "192.0.2.0/24", "sentinel": sentinel.RULE_VALUE},
+ {"from_port": 24, "to_port": 24, "cidr_ip": "198.51.100.0/24", "sentinel": sentinel.RULE_VALUE},
+ {"from_port": 50, "to_port": 50, "cidr_ip": "192.0.2.0/24", "sentinel": sentinel.RULE_VALUE},
+ {"from_port": 50, "to_port": 50, "cidr_ip": "198.51.100.0/24", "sentinel": sentinel.RULE_VALUE},
+ ],
+ ),
+]
+
+
+@pytest.mark.parametrize("rule, expected", PORT_EXPANSION)
+def test_expand_ports_from_rule(rule, expected):
+ assert ec2_security_group_module.expand_ports_from_rule(rule) == expected
+
+ # We shouldn't care about extra values lurking in the rule definition
+ rule["junk"] = sentinel.EXTRA_JUNK
+ assert ec2_security_group_module.expand_ports_from_rule(rule) == expected
+
+
+@pytest.mark.parametrize("rule, expected", SOURCE_EXPANSION)
+def test_expand_sources_from_rule(rule, expected):
+ assert ec2_security_group_module.expand_sources_from_rule(rule) == expected
+
+ # We shouldn't care about extra values lurking in the rule definition
+ rule["junk"] = sentinel.EXTRA_JUNK
+ assert ec2_security_group_module.expand_sources_from_rule(rule) == expected
+
+
+@pytest.mark.parametrize("rule, expected", PORTS_EXPANSION)
+def test_expand_ports_list(rule, expected):
+ assert ec2_security_group_module.expand_ports_list(rule) == expected
+
+
+@pytest.mark.skipif(
+ sys.version_info < (3, 7),
+ reason="requires Python 3.7 or higher - sentinel doesn't behave well with deepcopy in Python 3.6",
+)
+@pytest.mark.parametrize("source_type", sorted(ec2_security_group_module.SOURCE_TYPES_ALL))
+def test_strip_rule_source(source_type):
+ rule = {source_type: sentinel.SOURCE_VALUE}
+ assert ec2_security_group_module._strip_rule(rule) == {}
+ assert rule == {source_type: sentinel.SOURCE_VALUE}
+
+ rule = {source_type: sentinel.SOURCE_VALUE, "sentinel": sentinel.SENTINEL_VALUE}
+ assert ec2_security_group_module._strip_rule(rule) == {"sentinel": sentinel.SENTINEL_VALUE}
+ assert rule == {source_type: sentinel.SOURCE_VALUE, "sentinel": sentinel.SENTINEL_VALUE}
+
+
+@pytest.mark.skipif(
+ sys.version_info < (3, 7),
+ reason="requires Python 3.7 or higher - sentinel doesn't behave well with deepcopy in Python 3.6",
+)
+@pytest.mark.parametrize("port_type", sorted(ec2_security_group_module.PORT_TYPES_ALL))
+def test_strip_rule_port(port_type):
+ rule = {port_type: sentinel.PORT_VALUE}
+ assert ec2_security_group_module._strip_rule(rule) == {}
+ assert rule == {port_type: sentinel.PORT_VALUE}
+
+ rule = {port_type: sentinel.PORT_VALUE, "sentinel": sentinel.SENTINEL_VALUE}
+ assert ec2_security_group_module._strip_rule(rule) == {"sentinel": sentinel.SENTINEL_VALUE}
+ assert rule == {port_type: sentinel.PORT_VALUE, "sentinel": sentinel.SENTINEL_VALUE}
+
+
+@pytest.mark.skipif(
+ sys.version_info < (3, 7),
+ reason="requires Python 3.7 or higher - sentinel doesn't behave well with deepcopy in Python 3.6",
+)
+@pytest.mark.parametrize("rule, expected", RULE_EXPANSION)
+def test_rule_expand(rule, expected):
+ assert ec2_security_group_module.expand_rule(rule) == expected
+
+
+##########################################################
+# Examples where we explicitly expect to raise an exception
+
+
+def test_expand_ports_list_bad():
+ with pytest.raises(ec2_security_group_module.SecurityGroupError):
+ ec2_security_group_module.expand_ports_list(["junk"])
+
+
+def test_expand_sources_from_rule_bad():
+ with pytest.raises(ec2_security_group_module.SecurityGroupError):
+ ec2_security_group_module.expand_sources_from_rule(dict())
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_formatting.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_formatting.py
new file mode 100644
index 000000000..358512a00
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_formatting.py
@@ -0,0 +1,239 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import sentinel
+
+import pytest
+
+import ansible_collections.amazon.aws.plugins.modules.ec2_security_group as ec2_security_group_module
+
+SORT_ORDER = [
+ (dict(), dict()),
+ (
+ dict(ip_permissions=[], ip_permissions_egress=[]),
+ dict(ip_permissions=[], ip_permissions_egress=[]),
+ ),
+ (
+ dict(
+ ip_permissions=[
+ dict(
+ ip_protocol="tcp",
+ ip_ranges=[],
+ ipv6_ranges=[
+ dict(cidr_ipv6="2001:DB8:8000::/34"),
+ dict(cidr_ipv6="2001:DB8:4000::/34"),
+ ],
+ prefix_list_ids=[],
+ user_id_group_pairs=[],
+ ),
+ dict(
+ ip_protocol="-1",
+ ip_ranges=[
+ dict(cidr_ip="198.51.100.0/24"),
+ dict(cidr_ip="192.0.2.0/24"),
+ ],
+ ipv6_ranges=[],
+ prefix_list_ids=[],
+ user_id_group_pairs=[],
+ ),
+ dict(
+ from_port="22",
+ ip_ranges=[],
+ ipv6_ranges=[],
+ prefix_list_ids=[],
+ to_port="22",
+ user_id_group_pairs=[
+ dict(group_id="sg-3950599b", user_id="123456789012"),
+ dict(group_id="sg-fbfd1e3a", user_id="012345678901"),
+ dict(group_id="sg-00ec640f", user_id="012345678901"),
+ ],
+ ),
+ dict(
+ from_port=38,
+ ip_protocol="tcp",
+ ip_ranges=[],
+ ipv6_ranges=[],
+ prefix_list_ids=[
+ dict(prefix_list_id="pl-2263adef"),
+ dict(prefix_list_id="pl-0a5fccee"),
+ dict(prefix_list_id="pl-65911ba9"),
+ ],
+ to_port=38,
+ user_id_group_pairs=[],
+ ),
+ ],
+ ip_permissions_egress=[
+ dict(
+ ip_protocol="-1",
+ ip_ranges=[
+ dict(cidr_ip="198.51.100.0/24"),
+ dict(cidr_ip="192.0.2.0/24"),
+ ],
+ ipv6_ranges=[],
+ prefix_list_ids=[],
+ user_id_group_pairs=[],
+ ),
+ dict(
+ from_port=443,
+ ip_protocol="tcp",
+ ip_ranges=[],
+ ipv6_ranges=[],
+ prefix_list_ids=[],
+ to_port=443,
+ user_id_group_pairs=[
+ dict(group_id="sg-fbfd1e3a", user_id="012345678901"),
+ dict(group_id="sg-00ec640f", user_id="012345678901"),
+ ],
+ ),
+ ],
+ ),
+ dict(
+ ip_permissions=[
+ dict(
+ ip_protocol="-1",
+ ip_ranges=[
+ dict(cidr_ip="192.0.2.0/24"),
+ dict(cidr_ip="198.51.100.0/24"),
+ ],
+ ipv6_ranges=[],
+ prefix_list_ids=[],
+ user_id_group_pairs=[],
+ ),
+ dict(
+ ip_protocol="tcp",
+ ip_ranges=[],
+ ipv6_ranges=[
+ dict(cidr_ipv6="2001:DB8:4000::/34"),
+ dict(cidr_ipv6="2001:DB8:8000::/34"),
+ ],
+ prefix_list_ids=[],
+ user_id_group_pairs=[],
+ ),
+ dict(
+ from_port=38,
+ ip_protocol="tcp",
+ ip_ranges=[],
+ ipv6_ranges=[],
+ prefix_list_ids=[
+ dict(prefix_list_id="pl-0a5fccee"),
+ dict(prefix_list_id="pl-2263adef"),
+ dict(prefix_list_id="pl-65911ba9"),
+ ],
+ to_port=38,
+ user_id_group_pairs=[],
+ ),
+ dict(
+ from_port="22",
+ ip_ranges=[],
+ ipv6_ranges=[],
+ prefix_list_ids=[],
+ to_port="22",
+ user_id_group_pairs=[
+ dict(group_id="sg-00ec640f", user_id="012345678901"),
+ dict(group_id="sg-3950599b", user_id="123456789012"),
+ dict(group_id="sg-fbfd1e3a", user_id="012345678901"),
+ ],
+ ),
+ ],
+ ip_permissions_egress=[
+ dict(
+ ip_protocol="-1",
+ ip_ranges=[
+ dict(cidr_ip="192.0.2.0/24"),
+ dict(cidr_ip="198.51.100.0/24"),
+ ],
+ ipv6_ranges=[],
+ prefix_list_ids=[],
+ user_id_group_pairs=[],
+ ),
+ dict(
+ from_port=443,
+ ip_protocol="tcp",
+ ip_ranges=[],
+ ipv6_ranges=[],
+ prefix_list_ids=[],
+ to_port=443,
+ user_id_group_pairs=[
+ dict(group_id="sg-00ec640f", user_id="012345678901"),
+ dict(group_id="sg-fbfd1e3a", user_id="012345678901"),
+ ],
+ ),
+ ],
+ ),
+ ),
+]
+
+
+@pytest.mark.parametrize("group, expected", SORT_ORDER)
+def test_sort_security_group(group, expected):
+ assert ec2_security_group_module.sort_security_group(group) == expected
+
+ # We shouldn't care about extra values lurking in the security group definition
+ group["junk"] = sentinel.EXTRA_JUNK
+ expected["junk"] = sentinel.EXTRA_JUNK
+ assert ec2_security_group_module.sort_security_group(group) == expected
+
+
+def test_get_rule_sort_key():
+ # Random text, to try and ensure the content of the string doesn't affect the key returned
+ dict_to_sort = dict(
+ cidr_ip="MtY0d3Ps6ePsMM0zB18g",
+ cidr_ipv6="ffbCwK2xhCsy8cyXqHuz",
+ prefix_list_id="VXKCoW296XxIRiBrTUw8",
+ group_id="RZpolpZ5wYPPpbqVo1Db",
+ sentinel=sentinel.EXTRA_RULE_KEY,
+ )
+
+    # Walk through the keys we use and check that they have the priority we expect
+ for key_name in ["cidr_ip", "cidr_ipv6", "prefix_list_id", "group_id"]:
+ assert ec2_security_group_module.get_rule_sort_key(dict_to_sort) == dict_to_sort[key_name]
+ # Remove the current key so that the next time round another key will have priority
+ dict_to_sort.pop(key_name)
+
+ assert dict_to_sort == {"sentinel": sentinel.EXTRA_RULE_KEY}
+ assert ec2_security_group_module.get_rule_sort_key(dict_to_sort) is None
+
+
+def test_get_ip_permissions_sort_key():
+ dict_to_sort = dict(
+ ip_ranges=[
+ dict(cidr_ip="198.51.100.0/24", original_index=0),
+ dict(cidr_ip="192.0.2.0/24", original_index=1),
+ dict(cidr_ip="203.0.113.0/24", original_index=2),
+ ],
+ ipv6_ranges=[
+ dict(cidr_ipv6="2001:DB8:4000::/34", original_index=0),
+ dict(cidr_ipv6="2001:DB8:0000::/34", original_index=1),
+ dict(cidr_ipv6="2001:DB8:8000::/34", original_index=2),
+ ],
+ prefix_list_ids=[
+ dict(prefix_list_id="pl-2263adef", original_index=0),
+ dict(prefix_list_id="pl-0a5fccee", original_index=1),
+ dict(prefix_list_id="pl-65911ba9", original_index=2),
+ ],
+ user_id_group_pairs=[
+ dict(group_id="sg-3950599b", original_index=0),
+ dict(group_id="sg-fbfd1e3a", original_index=1),
+ dict(group_id="sg-00ec640f", original_index=2),
+ ],
+ sentinel=sentinel.EXTRA_RULE_KEY,
+ )
+
+ expected_keys = dict(
+ ip_ranges="ipv4:192.0.2.0/24",
+ ipv6_ranges="ipv6:2001:DB8:0000::/34",
+ prefix_list_ids="pl:pl-0a5fccee",
+ user_id_group_pairs="ugid:sg-00ec640f",
+ )
+
+    # Walk through the keys we use and check that they have the priority we expect
+ for key_name in ["ip_ranges", "ipv6_ranges", "prefix_list_ids", "user_id_group_pairs"]:
+ sort_key = ec2_security_group_module.get_ip_permissions_sort_key(dict_to_sort)
+ assert sort_key == expected_keys[key_name]
+ # Remove the current key so that the next time round another key will have priority
+ dict_to_sort.pop(key_name)
+
+ assert dict_to_sort == {"sentinel": sentinel.EXTRA_RULE_KEY}
+ assert ec2_security_group_module.get_ip_permissions_sort_key(dict_to_sort) is None
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_get_target_from_rule.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_get_target_from_rule.py
new file mode 100644
index 000000000..34fa8de1a
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_get_target_from_rule.py
@@ -0,0 +1,99 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from copy import deepcopy
+from unittest.mock import sentinel
+
+import pytest
+
+import ansible_collections.amazon.aws.plugins.modules.ec2_security_group as ec2_security_group_module
+
+
+@pytest.fixture
+def ec2_security_group(monkeypatch):
+    # Monkey patch the module-level current_account_id with a sentinel; the helpers that
+    # consume it are tested separately, here we only care that the account ID is threaded
+    # through to the right place in the computed rule targets.
+ monkeypatch.setattr(ec2_security_group_module, "current_account_id", sentinel.CURRENT_ACCOUNT_ID)
+ return ec2_security_group_module
+
+
+def test_target_from_rule_with_group_id_local_group(ec2_security_group):
+ groups = dict()
+ original_groups = deepcopy(groups)
+ rule_type, target, created = ec2_security_group._target_from_rule_with_group_id(
+ dict(group_id="sg-123456789abcdef01"),
+ groups,
+ )
+ assert groups == original_groups
+ assert rule_type == "group"
+ assert created is False
+ assert target[0] is sentinel.CURRENT_ACCOUNT_ID
+ assert target[1] == "sg-123456789abcdef01"
+ assert target[2] is None
+
+
+def test_target_from_rule_with_group_id_peer_group(ec2_security_group):
+ groups = dict()
+ rule_type, target, created = ec2_security_group._target_from_rule_with_group_id(
+ dict(group_id="123456789012/sg-123456789abcdef02/example-group-name"),
+ groups,
+ )
+ assert rule_type == "group"
+ assert created is False
+ assert target[0] == "123456789012"
+ assert target[1] == "sg-123456789abcdef02"
+ assert target[2] is None
+
+ assert sorted(groups.keys()) == ["example-group-name", "sg-123456789abcdef02"]
+ rule_by_id = groups["sg-123456789abcdef02"]
+ rule_by_name = groups["example-group-name"]
+
+ assert rule_by_id is rule_by_name
+ assert rule_by_id["UserId"] == "123456789012"
+ assert rule_by_id["GroupId"] == "sg-123456789abcdef02"
+ assert rule_by_id["GroupName"] == "example-group-name"
+
+
+def test_target_from_rule_with_group_id_elb(ec2_security_group):
+ groups = dict()
+ rule_type, target, created = ec2_security_group._target_from_rule_with_group_id(
+ dict(group_id="amazon-elb/amazon-elb-sg"),
+ groups,
+ )
+ assert rule_type == "group"
+ assert created is False
+ assert target[0] == "amazon-elb"
+ assert target[1] is None
+ assert target[2] == "amazon-elb-sg"
+
+ assert "amazon-elb-sg" in groups.keys()
+ rule_by_name = groups["amazon-elb-sg"]
+
+ assert rule_by_name["UserId"] == "amazon-elb"
+ assert rule_by_name["GroupId"] is None
+ assert rule_by_name["GroupName"] == "amazon-elb-sg"
+
+
+def test_target_from_rule_with_group_id_elb_with_sg(ec2_security_group):
+ groups = dict()
+ rule_type, target, created = ec2_security_group._target_from_rule_with_group_id(
+ dict(group_id="amazon-elb/sg-5a9c116a/amazon-elb-sg"),
+ groups,
+ )
+ assert rule_type == "group"
+ assert created is False
+ assert target[0] == "amazon-elb"
+ assert target[1] is None
+ assert target[2] == "amazon-elb-sg"
+
+ assert sorted(groups.keys()) == ["amazon-elb-sg", "sg-5a9c116a"]
+ rule_by_id = groups["sg-5a9c116a"]
+ rule_by_name = groups["amazon-elb-sg"]
+
+ assert rule_by_id is rule_by_name
+ assert rule_by_id["UserId"] == "amazon-elb"
+ assert rule_by_id["GroupId"] == "sg-5a9c116a"
+ assert rule_by_id["GroupName"] == "amazon-elb-sg"
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_validate_ip.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_validate_ip.py
new file mode 100644
index 000000000..eb2de7596
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_validate_ip.py
@@ -0,0 +1,85 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+import warnings
+from unittest.mock import MagicMock
+from unittest.mock import sentinel
+
+import pytest
+
+import ansible_collections.amazon.aws.plugins.modules.ec2_security_group as ec2_security_group_module
+
+
+@pytest.fixture
+def aws_module():
+ aws_module = MagicMock()
+ aws_module.warn = warnings.warn
+ return aws_module
+
+
+@pytest.fixture
+def ec2_security_group(monkeypatch):
+    # Monkey patch the module-level current_account_id with a sentinel; the validation
+    # functions under test don't depend on a real account ID, we only need a stable,
+    # recognisable placeholder so the module can be exercised in isolation.
+ monkeypatch.setattr(ec2_security_group_module, "current_account_id", sentinel.CURRENT_ACCOUNT_ID)
+ return ec2_security_group_module
+
+
+IPS_GOOD = [
+ (
+ "192.0.2.2",
+ "192.0.2.2",
+ ),
+ (
+ "192.0.2.1/32",
+ "192.0.2.1/32",
+ ),
+ (
+ "192.0.2.1/255.255.255.255",
+ "192.0.2.1/32",
+ ),
+ (
+ "192.0.2.0/24",
+ "192.0.2.0/24",
+ ),
+ (
+ "192.0.2.0/255.255.255.255",
+ "192.0.2.0/32",
+ ),
+ (
+ "2001:db8::1/128",
+ "2001:db8::1/128",
+ ),
+ (
+ "2001:db8::/32",
+ "2001:db8::/32",
+ ),
+ ("2001:db8:fe80:b897:8990:8a7c:99bf:323d/128", "2001:db8:fe80:b897:8990:8a7c:99bf:323d/128"),
+]
+
+IPS_WARN = [
+ ("192.0.2.1/24", "192.0.2.0/24", "One of your CIDR addresses"),
+ ("2001:DB8::1/32", "2001:DB8::/32", "One of your IPv6 CIDR addresses"),
+ ("2001:db8:fe80:b897:8990:8a7c:99bf:323d/64", "2001:db8:fe80:b897::/64", "One of your IPv6 CIDR addresses"),
+]
+
+
+@pytest.mark.parametrize("ip,expected", IPS_GOOD)
+def test_validate_ip_no_warn(ec2_security_group, aws_module, ip, expected):
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ result = ec2_security_group.validate_ip(aws_module, ip)
+
+ assert result == expected
+
+
+@pytest.mark.parametrize("ip,expected,warn_msg", IPS_WARN)
+def test_validate_ip_warn(ec2_security_group, aws_module, ip, warn_msg, expected):
+ with pytest.warns(UserWarning, match=warn_msg) as recorded:
+ result = ec2_security_group.validate_ip(aws_module, ip)
+
+ assert len(recorded) == 1
+ assert result == expected
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_validate_rule.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_validate_rule.py
new file mode 100644
index 000000000..9949c1b5c
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/ec2_security_group/test_validate_rule.py
@@ -0,0 +1,100 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from copy import deepcopy
+
+import pytest
+
+import ansible_collections.amazon.aws.plugins.modules.ec2_security_group as ec2_security_group_module
+
+VALID_RULES = [
+ dict(
+ proto="all",
+ ),
+ dict(
+ proto="tcp",
+ from_port="1",
+ to_port="65535",
+ ),
+ dict(
+ proto="icmpv6",
+ from_port="-1",
+ to_port="-1",
+ ),
+ dict(
+ proto="icmp",
+ from_port="-1",
+ to_port="-1",
+ ),
+ dict(proto="icmpv6", icmp_type="8", icmp_code="1"),
+ dict(proto="icmpv6", icmp_code="1"),
+ dict(proto="icmpv6", icmp_type="8"),
+ dict(proto="icmp", icmp_type="8", icmp_code="1"),
+ dict(proto="icmp", icmp_code="1"),
+ dict(proto="icmp", icmp_type="8"),
+]
+
+INVALID_RULES = [
+ (
+ dict(
+ proto="tcp",
+ icmp_code="1",
+ ),
+ r"Specify proto: icmp or icmpv6",
+ ),
+ (
+ dict(
+ proto="tcp",
+ icmp_type="8",
+ ),
+ r"Specify proto: icmp or icmpv6",
+ ),
+ (
+ dict(
+ proto="tcp",
+ icmp_type="8",
+ icmp_code="1",
+ ),
+ r"Specify proto: icmp or icmpv6",
+ ),
+ (
+ dict(
+ proto="all",
+ icmp_code="1",
+ ),
+ r"Specify proto: icmp or icmpv6",
+ ),
+ (
+ dict(
+ proto="all",
+ icmp_type="8",
+ ),
+ r"Specify proto: icmp or icmpv6",
+ ),
+ (
+ dict(
+ proto="all",
+ icmp_type="8",
+ icmp_code="1",
+ ),
+ r"Specify proto: icmp or icmpv6",
+ ),
+]
+
+
+@pytest.mark.parametrize("rule,error_msg", INVALID_RULES)
+def test_validate_rule_invalid(rule, error_msg):
+ original_rule = deepcopy(rule)
+ with pytest.raises(ec2_security_group_module.SecurityGroupError, match=error_msg):
+ ec2_security_group_module.validate_rule(rule)
+ assert original_rule == rule
+
+
+@pytest.mark.parametrize("rule", VALID_RULES)
+def test_validate_rule_valid(rule):
+ original_rule = deepcopy(rule)
+ ec2_security_group_module.validate_rule(rule)
+ # validate_rule shouldn't change the rule
+ assert original_rule == rule
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/fixtures/__init__.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/fixtures/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/fixtures/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/fixtures/certs/__init__.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/fixtures/certs/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/fixtures/certs/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_backup_restore_job_info.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_backup_restore_job_info.py
new file mode 100644
index 000000000..51c495e30
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_backup_restore_job_info.py
@@ -0,0 +1,146 @@
+# (c) 2022 Red Hat Inc.
+
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import MagicMock
+from unittest.mock import patch
+
+import pytest
+
+from ansible.module_utils.common.dict_transformations import camel_dict_to_snake_dict
+
+from ansible_collections.amazon.aws.plugins.modules import backup_restore_job_info
+
+module_name = "ansible_collections.amazon.aws.plugins.modules.backup_restore_job_info"
+
+
+@pytest.mark.parametrize(
+ "account_id, status, created_before, created_after, completed_before, completed_after,expected",
+ [
+ ("", "", "", "", "", "", {}),
+ ("123456789012", "", "", "", "", "", {"ByAccountId": "123456789012"}),
+ (
+ "123456789012",
+ "COMPLETED",
+ "",
+ "",
+ "",
+ "",
+ {"ByAccountId": "123456789012", "ByStatus": "COMPLETED"},
+ ),
+ ],
+)
+def test_build_request_args(
+ account_id, status, created_before, created_after, completed_before, completed_after, expected
+):
+ assert (
+ backup_restore_job_info.build_request_args(
+ account_id, status, created_before, created_after, completed_before, completed_after
+ )
+ == expected
+ )
+
+
+def test__describe_restore_job():
+ connection = MagicMock()
+ module = MagicMock()
+
+ restore_job_id = "52BEE289-xxxx-xxxx-xxxx-47DCAA2E7ACD"
+ restore_job_info = {
+ "AccountId": "123456789012",
+ "BackupSizeInBytes": "8589934592",
+ "CompletionDate": "2023-03-13T15:53:07.172000-07:00",
+ "CreatedResourceArn": "arn:aws:ec2:us-east-2:123456789012:instance/i-01234567ec51af3f",
+ "CreationDate": "2023-03-13T15:53:07.172000-07:00",
+ "IamRoleArn": "arn:aws:iam::123456789012:role/service-role/AWSBackupDefaultServiceRole",
+ "PercentDone": "0.00%",
+ "RecoveryPointArn": "arn:aws:ec2:us-east-2::image/ami-01234567ec51af3f",
+ "ResourceType": "EC2",
+ "RestoreJobId": "52BEE289-xxxx-xxxx-xxxx-47DCAA2E7ACD",
+ "Status": "COMPLETED",
+ }
+
+ connection.describe_restore_job.return_value = restore_job_info
+
+ result = backup_restore_job_info._describe_restore_job(connection, module, restore_job_id)
+
+ assert result == [camel_dict_to_snake_dict(restore_job_info)]
+ connection.describe_restore_job.assert_called_with(RestoreJobId=restore_job_id)
+ connection.describe_restore_job.call_count == 1
+
+
+def test__list_restore_jobs():
+ connection = MagicMock()
+ conn_paginator = MagicMock()
+ paginate = MagicMock()
+
+ request_args = {"ByAccountId": "123456789012"}
+
+ restore_job = {
+ "AccountId": "123456789012",
+ "BackupSizeInBytes": "8589934592",
+ "CompletionDate": "2023-03-13T15:53:07.172000-07:00",
+ "CreatedResourceArn": "arn:aws:ec2:us-east-2:123456789012:instance/i-01234567ec51af3f",
+ "CreationDate": "2023-03-13T15:53:07.172000-07:00",
+ "IamRoleArn": "arn:aws:iam::123456789012:role/service-role/AWSBackupDefaultServiceRole",
+ "PercentDone": "0.00%",
+ "RecoveryPointArn": "arn:aws:ec2:us-east-2::image/ami-01234567ec51af3f",
+ "ResourceType": "EC2",
+ "RestoreJobId": "52BEE289-xxxx-xxxx-xxxx-47DCAA2E7ACD",
+ "Status": "COMPLETED",
+ }
+
+ connection.get_paginator.return_value = conn_paginator
+ conn_paginator.paginate.return_value = paginate
+
+ paginate.build_full_result.return_value = {"RestoreJobs": [restore_job]}
+
+ result = backup_restore_job_info._list_restore_jobs(connection=connection, **request_args)
+
+ assert result == paginate.build_full_result.return_value
+ connection.get_paginator.assert_called_with("list_restore_jobs")
+ conn_paginator.paginate.assert_called_with(**request_args)
+
+
+@patch(module_name + "._list_restore_jobs")
+def test_list_restore_jobs(m__list_restore_jobs):
+ connection = MagicMock()
+ module = MagicMock()
+
+ request_args = {"ByAccountId": "123456789012"}
+
+ m__list_restore_jobs.return_value = {
+ "RestoreJobs": [
+ {
+ "AccountId": "123456789012",
+ "BackupSizeInBytes": "8589934592",
+ "CompletionDate": "2023-03-13T15:53:07.172000-07:00",
+ "CreatedResourceArn": "arn:aws:ec2:us-east-2:123456789012:instance/i-01234567ec51af3f",
+ "CreationDate": "2023-03-13T15:53:07.172000-07:00",
+ "IamRoleArn": "arn:aws:iam::123456789012:role/service-role/AWSBackupDefaultServiceRole",
+ "PercentDone": "0.00%",
+ "RecoveryPointArn": "arn:aws:ec2:us-east-2::image/ami-01234567ec51af3f",
+ "ResourceType": "EC2",
+ "RestoreJobId": "52BEE289-xxxx-xxxx-xxxx-47DCAA2E7ACD",
+ "Status": "COMPLETED",
+ }
+ ]
+ }
+
+ list_restore_jobs_result = backup_restore_job_info.list_restore_jobs(connection, module, request_args)
+
+ assert m__list_restore_jobs.call_count == 1
+ m__list_restore_jobs.assert_called_with(connection, **request_args)
+ assert len(list_restore_jobs_result) == 1
+
+
+@patch(module_name + ".AnsibleAWSModule")
+def test_main_success(m_AnsibleAWSModule):
+ m_module = MagicMock()
+ m_AnsibleAWSModule.return_value = m_module
+
+ backup_restore_job_info.main()
+
+ m_module.client.assert_called_with("backup")
+ m_module.exit_json.assert_called_with(changed=False, restore_jobs=[{}])
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_cloudformation.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_cloudformation.py
index f46bc1113..fd0b7ca75 100644
--- a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_cloudformation.py
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_cloudformation.py
@@ -3,21 +3,23 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import pytest
-# Magic...
-from ansible_collections.amazon.aws.tests.unit.utils.amazon_placebo_fixtures import maybe_sleep, placeboify # pylint: disable=unused-import
-
from ansible_collections.amazon.aws.plugins.module_utils.botocore import boto_exception
-from ansible_collections.amazon.aws.plugins.module_utils.modules import _RetryingBotoClientWrapper
from ansible_collections.amazon.aws.plugins.module_utils.retries import AWSRetry
-
+from ansible_collections.amazon.aws.plugins.module_utils.retries import RetryingBotoClientWrapper
from ansible_collections.amazon.aws.plugins.modules import cloudformation as cfn_module
+# isort: off
+# Magic...
+# pylint: disable-next=unused-import
+from ansible_collections.amazon.aws.tests.unit.utils.amazon_placebo_fixtures import maybe_sleep
+
+# pylint: disable-next=unused-import
+from ansible_collections.amazon.aws.tests.unit.utils.amazon_placebo_fixtures import placeboify
+
+# isort: on
+
basic_yaml_tpl = """
---
AWSTemplateFormatVersion: '2010-09-09'
@@ -61,167 +63,153 @@ Resources:
default_events_limit = 10
-class FakeModule(object):
+class FakeModule:
def __init__(self, **kwargs):
self.params = kwargs
def fail_json(self, *args, **kwargs):
self.exit_args = args
self.exit_kwargs = kwargs
- raise Exception('FAIL')
+ raise Exception("FAIL")
def fail_json_aws(self, *args, **kwargs):
self.exit_args = args
self.exit_kwargs = kwargs
- raise Exception('FAIL')
+ raise Exception("FAIL")
def exit_json(self, *args, **kwargs):
self.exit_args = args
self.exit_kwargs = kwargs
- raise Exception('EXIT')
+ raise Exception("EXIT")
def _create_wrapped_client(placeboify):
- connection = placeboify.client('cloudformation')
+ connection = placeboify.client("cloudformation")
retry_decorator = AWSRetry.jittered_backoff()
- wrapped_conn = _RetryingBotoClientWrapper(connection, retry_decorator)
+ wrapped_conn = RetryingBotoClientWrapper(connection, retry_decorator)
return wrapped_conn
def test_invalid_template_json(placeboify):
connection = _create_wrapped_client(placeboify)
params = {
- 'StackName': 'ansible-test-wrong-json',
- 'TemplateBody': bad_json_tpl,
+ "StackName": "ansible-test-wrong-json",
+ "TemplateBody": bad_json_tpl,
}
m = FakeModule(disable_rollback=False)
with pytest.raises(Exception) as exc_info:
cfn_module.create_stack(m, params, connection, default_events_limit)
- pytest.fail('Expected malformed JSON to have caused the call to fail')
+ pytest.fail("Expected malformed JSON to have caused the call to fail")
- assert exc_info.match('FAIL')
+ assert exc_info.match("FAIL")
assert "ValidationError" in boto_exception(m.exit_args[0])
def test_client_request_token_s3_stack(maybe_sleep, placeboify):
connection = _create_wrapped_client(placeboify)
params = {
- 'StackName': 'ansible-test-client-request-token-yaml',
- 'TemplateBody': basic_yaml_tpl,
- 'ClientRequestToken': '3faf3fb5-b289-41fc-b940-44151828f6cf',
+ "StackName": "ansible-test-client-request-token-yaml",
+ "TemplateBody": basic_yaml_tpl,
+ "ClientRequestToken": "3faf3fb5-b289-41fc-b940-44151828f6cf",
}
m = FakeModule(disable_rollback=False)
result = cfn_module.create_stack(m, params, connection, default_events_limit)
- assert result['changed']
- assert len(result['events']) > 1
+ assert result["changed"]
+ assert len(result["events"]) > 1
# require that the final recorded stack state was CREATE_COMPLETE
# events are retrieved newest-first, so 0 is the latest
- assert 'CREATE_COMPLETE' in result['events'][0]
- connection.delete_stack(StackName='ansible-test-client-request-token-yaml')
+ assert "CREATE_COMPLETE" in result["events"][0]
+ connection.delete_stack(StackName="ansible-test-client-request-token-yaml")
def test_basic_s3_stack(maybe_sleep, placeboify):
connection = _create_wrapped_client(placeboify)
- params = {
- 'StackName': 'ansible-test-basic-yaml',
- 'TemplateBody': basic_yaml_tpl
- }
+ params = {"StackName": "ansible-test-basic-yaml", "TemplateBody": basic_yaml_tpl}
m = FakeModule(disable_rollback=False)
result = cfn_module.create_stack(m, params, connection, default_events_limit)
- assert result['changed']
- assert len(result['events']) > 1
+ assert result["changed"]
+ assert len(result["events"]) > 1
# require that the final recorded stack state was CREATE_COMPLETE
# events are retrieved newest-first, so 0 is the latest
- assert 'CREATE_COMPLETE' in result['events'][0]
- connection.delete_stack(StackName='ansible-test-basic-yaml')
+ assert "CREATE_COMPLETE" in result["events"][0]
+ connection.delete_stack(StackName="ansible-test-basic-yaml")
def test_delete_nonexistent_stack(maybe_sleep, placeboify):
connection = _create_wrapped_client(placeboify)
# module is only used if we threw an unexpected error
module = None
- result = cfn_module.stack_operation(module, connection, 'ansible-test-nonexist', 'DELETE', default_events_limit)
- assert result['changed']
- assert 'Stack does not exist.' in result['log']
+ result = cfn_module.stack_operation(module, connection, "ansible-test-nonexist", "DELETE", default_events_limit)
+ assert result["changed"]
+ assert "Stack does not exist." in result["log"]
def test_get_nonexistent_stack(placeboify):
connection = _create_wrapped_client(placeboify)
# module is only used if we threw an unexpected error
module = None
- assert cfn_module.get_stack_facts(module, connection, 'ansible-test-nonexist') is None
+ assert cfn_module.get_stack_facts(module, connection, "ansible-test-nonexist") is None
def test_missing_template_body():
m = FakeModule()
with pytest.raises(Exception) as exc_info:
- cfn_module.create_stack(
- module=m,
- stack_params={},
- cfn=None,
- events_limit=default_events_limit
- )
- pytest.fail('Expected module to have failed with no template')
-
- assert exc_info.match('FAIL')
+ cfn_module.create_stack(module=m, stack_params={}, cfn=None, events_limit=default_events_limit)
+ pytest.fail("Expected module to have failed with no template")
+
+ assert exc_info.match("FAIL")
assert not m.exit_args
- assert "Either 'template', 'template_body' or 'template_url' is required when the stack does not exist." == m.exit_kwargs['msg']
+ assert (
+ "Either 'template', 'template_body' or 'template_url' is required when the stack does not exist."
+ == m.exit_kwargs["msg"]
+ )
def test_on_create_failure_delete(maybe_sleep, placeboify):
m = FakeModule(
- on_create_failure='DELETE',
+ on_create_failure="DELETE",
disable_rollback=False,
)
connection = _create_wrapped_client(placeboify)
- params = {
- 'StackName': 'ansible-test-on-create-failure-delete',
- 'TemplateBody': failing_yaml_tpl
- }
+ params = {"StackName": "ansible-test-on-create-failure-delete", "TemplateBody": failing_yaml_tpl}
result = cfn_module.create_stack(m, params, connection, default_events_limit)
- assert result['changed']
- assert result['failed']
- assert len(result['events']) > 1
+ assert result["changed"]
+ assert result["failed"]
+ assert len(result["events"]) > 1
# require that the final recorded stack state was DELETE_COMPLETE
# events are retrieved newest-first, so 0 is the latest
- assert 'DELETE_COMPLETE' in result['events'][0]
+ assert "DELETE_COMPLETE" in result["events"][0]
def test_on_create_failure_rollback(maybe_sleep, placeboify):
m = FakeModule(
- on_create_failure='ROLLBACK',
+ on_create_failure="ROLLBACK",
disable_rollback=False,
)
connection = _create_wrapped_client(placeboify)
- params = {
- 'StackName': 'ansible-test-on-create-failure-rollback',
- 'TemplateBody': failing_yaml_tpl
- }
+ params = {"StackName": "ansible-test-on-create-failure-rollback", "TemplateBody": failing_yaml_tpl}
result = cfn_module.create_stack(m, params, connection, default_events_limit)
- assert result['changed']
- assert result['failed']
- assert len(result['events']) > 1
+ assert result["changed"]
+ assert result["failed"]
+ assert len(result["events"]) > 1
# require that the final recorded stack state was ROLLBACK_COMPLETE
# events are retrieved newest-first, so 0 is the latest
- assert 'ROLLBACK_COMPLETE' in result['events'][0]
- connection.delete_stack(StackName=params['StackName'])
+ assert "ROLLBACK_COMPLETE" in result["events"][0]
+ connection.delete_stack(StackName=params["StackName"])
def test_on_create_failure_do_nothing(maybe_sleep, placeboify):
m = FakeModule(
- on_create_failure='DO_NOTHING',
+ on_create_failure="DO_NOTHING",
disable_rollback=False,
)
connection = _create_wrapped_client(placeboify)
- params = {
- 'StackName': 'ansible-test-on-create-failure-do-nothing',
- 'TemplateBody': failing_yaml_tpl
- }
+ params = {"StackName": "ansible-test-on-create-failure-do-nothing", "TemplateBody": failing_yaml_tpl}
result = cfn_module.create_stack(m, params, connection, default_events_limit)
- assert result['changed']
- assert result['failed']
- assert len(result['events']) > 1
+ assert result["changed"]
+ assert result["failed"]
+ assert len(result["events"]) > 1
# require that the final recorded stack state was CREATE_FAILED
# events are retrieved newest-first, so 0 is the latest
- assert 'CREATE_FAILED' in result['events'][0]
- connection.delete_stack(StackName=params['StackName'])
+ assert "CREATE_FAILED" in result["events"][0]
+ connection.delete_stack(StackName=params["StackName"])
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_ami.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_ami.py
index 5e8140d4a..b1e23451b 100644
--- a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_ami.py
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_ami.py
@@ -1,7 +1,9 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from unittest.mock import MagicMock, Mock, patch, call
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import patch
import pytest
@@ -28,17 +30,371 @@ def test_create_image_uefi_data(m_get_image_by_id):
"uefi_data": "QU1aTlVFRkk9xcN0AAAAAHj5a7fZ9+3aT2gcVRgA8Ek3NipiPST0pCiCIlTJtj20FzENCcQa",
}
- ec2_ami.create_image(module, connection)
+ ec2_ami.CreateImage.do(module, connection, None)
assert connection.register_image.call_count == 1
connection.register_image.assert_has_calls(
[
call(
aws_retry=True,
- Description=None,
Name="my-image",
BootMode="uefi",
TpmSupport="v2.0",
- UefiData="QU1aTlVFRkk9xcN0AAAAAHj5a7fZ9+3aT2gcVRgA8Ek3NipiPST0pCiCIlTJtj20FzENCcQa"
+ UefiData="QU1aTlVFRkk9xcN0AAAAAHj5a7fZ9+3aT2gcVRgA8Ek3NipiPST0pCiCIlTJtj20FzENCcQa",
)
]
)
+
+
+def test_get_block_device_mapping_virtual_name():
+ image = {"block_device_mappings": [{"device_name": "/dev/sdc", "virtual_name": "ephemeral0"}]}
+ block_device = ec2_ami.get_block_device_mapping(image)
+ assert block_device == {"/dev/sdc": {"virtual_name": "ephemeral0"}}
+
+
+def test_get_image_by_id_found():
+ connection = MagicMock()
+
+ connection.describe_images.return_value = {"Images": [{"ImageId": "ami-0c7a795306730b288"}]}
+
+ image = ec2_ami.get_image_by_id(connection, "ami-0c7a795306730b288")
+ assert image["ImageId"] == "ami-0c7a795306730b288"
+ assert connection.describe_images.call_count == 1
+ assert connection.describe_image_attribute.call_count == 2
+ connection.describe_images.assert_has_calls(
+ [
+ call(
+ aws_retry=True,
+ ImageIds=["ami-0c7a795306730b288"],
+ )
+ ]
+ )
+
+
+def test_get_image_by_too_many():
+ connection = MagicMock()
+
+ connection.describe_images.return_value = {
+ "Images": [
+ {"ImageId": "ami-0c7a795306730b288"},
+ {"ImageId": "ami-0c7a795306730b288"},
+ ]
+ }
+
+ with pytest.raises(ec2_ami.Ec2AmiFailure):
+ ec2_ami.get_image_by_id(connection, "ami-0c7a795306730b288")
+
+
+def test_get_image_missing():
+ connection = MagicMock()
+
+ connection.describe_images.return_value = {"Images": []}
+
+ image = ec2_ami.get_image_by_id(connection, "ami-0c7a795306730b288")
+ assert image is None
+ assert connection.describe_images.call_count == 1
+ connection.describe_images.assert_has_calls(
+ [
+ call(
+ aws_retry=True,
+ ImageIds=["ami-0c7a795306730b288"],
+ )
+ ]
+ )
+
+
+@patch(
+ module_name + ".get_image_by_id",
+)
+def test_create_image_minimal(m_get_image_by_id):
+ module = MagicMock()
+ connection = MagicMock()
+
+ m_get_image_by_id.return_value = {"ImageId": "ami-0c7a795306730b288"}
+ module.params = {
+ "name": "my-image",
+ "instance_id": "i-123456789",
+ "image_id": "ami-0c7a795306730b288",
+ }
+ ec2_ami.CreateImage.do(module, connection, None)
+ assert connection.create_image.call_count == 1
+ connection.create_image.assert_has_calls(
+ [
+ call(
+ aws_retry=True,
+ InstanceId="i-123456789",
+ Name="my-image",
+ )
+ ]
+ )
+
+
+def test_validate_params():
+ module = MagicMock()
+
+ ec2_ami.validate_params(module)
+ module.fail_json.assert_any_call("one of the following is required: name, image_id")
+ assert module.require_botocore_at_least.call_count == 0
+
+ module = MagicMock()
+ ec2_ami.validate_params(module, tpm_support=True)
+ assert module.require_botocore_at_least.call_count == 0
+
+ module = MagicMock()
+ ec2_ami.validate_params(module, tpm_support=True, boot_mode="legacy-bios")
+ assert module.require_botocore_at_least.call_count == 0
+ module.fail_json.assert_any_call("To specify 'tpm_support', 'boot_mode' must be 'uefi'.")
+
+ module = MagicMock()
+ ec2_ami.validate_params(module, state="present", name="bobby")
+ assert module.require_botocore_at_least.call_count == 0
+ module.fail_json.assert_any_call(
+ "The parameters instance_id or device_mapping (register from EBS snapshot) are required for a new image."
+ )
+
+
+def test_rename_item_if_exists():
+ dict_object = {
+ "Paris": True,
+ "London": {"Heathrow Airport": False},
+ }
+ ec2_ami.rename_item_if_exists(dict_object, "Paris", "NewYork")
+ assert dict_object == {"London": {"Heathrow Airport": False}, "NewYork": True}
+
+ dict_object = {
+ "Cities": {},
+ "London": "bar",
+ }
+
+ ec2_ami.rename_item_if_exists(dict_object, "London", "Abidjan", "Cities")
+ ec2_ami.rename_item_if_exists(dict_object, "Doesnt-exist", "Nowhere", "Cities")
+ assert dict_object == {"Cities": {"Abidjan": "bar"}}
+
+
+def test_DeregisterImage_defer_purge_snapshots():
+ image = {"BlockDeviceMappings": [{"Ebs": {"SnapshotId": "My_snapshot"}}, {}]}
+ func = ec2_ami.DeregisterImage.defer_purge_snapshots(image)
+
+ connection = MagicMock()
+ assert list(func(connection)) == ["My_snapshot"]
+ connection.delete_snapshot.assert_called_with(aws_retry=True, SnapshotId="My_snapshot")
+
+
+@patch(module_name + ".get_image_by_id")
+@patch(module_name + ".time.sleep")
+def test_DeregisterImage_timeout_success(m_sleep, m_get_image_by_id):
+ connection = MagicMock()
+ m_get_image_by_id.side_effect = [{"ImageId": "ami-0c7a795306730b288"}, None]
+
+ ec2_ami.DeregisterImage.timeout(connection, "ami-0c7a795306730b288", 10)
+ assert m_sleep.call_count == 1
+
+
+@patch(module_name + ".get_image_by_id")
+@patch(module_name + ".time.time")
+@patch(module_name + ".time.sleep")
+def test_DeregisterImage_timeout_failure(m_sleep, m_time, m_get_image_by_id):
+ connection = MagicMock()
+ m_time.side_effect = list(range(1, 30))
+ m_get_image_by_id.return_value = {"ImageId": "ami-0c7a795306730b288"}
+
+ with pytest.raises(ec2_ami.Ec2AmiFailure):
+ ec2_ami.DeregisterImage.timeout(connection, "ami-0c7a795306730b288", 10)
+ assert m_sleep.call_count == 9
+
+
+def test_UpdateImage_set_launch_permission_check_mode_no_change():
+ connection = MagicMock()
+ image = {"ImageId": "ami-0c7a795306730b288", "LaunchPermissions": {}}
+
+ changed = ec2_ami.UpdateImage.set_launch_permission(connection, image, launch_permissions={}, check_mode=True)
+ assert changed is False
+ assert connection.modify_image_attribute.call_count == 0
+
+ launch_permissions = {"user_ids": ["123456789012"], "group_names": ["foo", "bar"]}
+ image = {
+ "ImageId": "ami-0c7a795306730b288",
+ "LaunchPermissions": [
+ {"UserId": "123456789012"},
+ {"GroupName": "foo"},
+ {"GroupName": "bar"},
+ ],
+ }
+
+
+def test_UpdateImage_set_launch_permission_check_mode_with_change():
+ connection = MagicMock()
+ image = {"ImageId": "ami-0c7a795306730b288", "LaunchPermissions": {}}
+ launch_permissions = {"user_ids": ["123456789012"], "group_names": ["foo", "bar"]}
+ changed = ec2_ami.UpdateImage.set_launch_permission(connection, image, launch_permissions, check_mode=True)
+ assert changed is True
+ assert connection.modify_image_attribute.call_count == 0
+
+
+def test_UpdateImage_set_launch_permission_with_change():
+ connection = MagicMock()
+ image = {"ImageId": "ami-0c7a795306730b288", "LaunchPermissions": {}}
+ launch_permissions = {"user_ids": ["123456789012"], "group_names": ["foo", "bar"]}
+ changed = ec2_ami.UpdateImage.set_launch_permission(connection, image, launch_permissions, check_mode=False)
+ assert changed is True
+ assert connection.modify_image_attribute.call_count == 1
+ connection.modify_image_attribute.assert_called_with(
+ aws_retry=True,
+ ImageId="ami-0c7a795306730b288",
+ Attribute="launchPermission",
+ LaunchPermission={
+ "Add": [{"Group": "bar"}, {"Group": "foo"}, {"UserId": "123456789012"}],
+ "Remove": [],
+ },
+ )
+
+
+def test_UpdateImage_set_description():
+ connection = MagicMock()
+ module = MagicMock()
+ module.check_mode = False
+ image = {"ImageId": "ami-0c7a795306730b288", "Description": "My description"}
+ changed = ec2_ami.UpdateImage.set_description(connection, module, image, "My description")
+ assert changed is False
+
+ changed = ec2_ami.UpdateImage.set_description(connection, module, image, "New description")
+ assert changed is True
+ assert connection.modify_image_attribute.call_count == 1
+ connection.modify_image_attribute.assert_called_with(
+ aws_retry=True,
+ ImageId="ami-0c7a795306730b288",
+ Attribute="Description",
+ Description={"Value": "New description"},
+ )
+
+
+def test_UpdateImage_set_description_check_mode():
+ connection = MagicMock()
+ module = MagicMock()
+ module.check_mode = True
+ image = {"ImageId": "ami-0c7a795306730b288", "Description": "My description"}
+ changed = ec2_ami.UpdateImage.set_description(connection, module, image, "My description")
+ assert changed is False
+
+ changed = ec2_ami.UpdateImage.set_description(connection, module, image, "New description")
+ assert changed is True
+ assert connection.modify_image_attribute.call_count == 0
+
+
+def test_CreateImage_build_block_device_mapping():
+ device_mapping = [
+ {
+ "device_name": "/dev/xvda",
+ "volume_size": 8,
+ "snapshot_id": "snap-xxxxxxxx",
+ "delete_on_termination": True,
+ "volume_type": "gp2",
+ "no_device": False,
+ },
+ {
+ "device_name": "/dev/xvdb",
+ "no_device": True,
+ },
+ ]
+ result = ec2_ami.CreateImage.build_block_device_mapping(device_mapping)
+ assert result == [
+ {
+ "Ebs": {
+ "DeleteOnTermination": True,
+ "SnapshotId": "snap-xxxxxxxx",
+ "VolumeSize": 8,
+ "VolumeType": "gp2",
+ },
+ "DeviceName": "/dev/xvda",
+ },
+ {"DeviceName": "/dev/xvdb", "Ebs": {}, "NoDevice": ""},
+ ]
+
+
+def test_CreateImage_do_check_mode_no_change():
+ module = MagicMock()
+
+ module.params = {"name": "my-image"}
+ connection = MagicMock()
+ connection.describe_images.return_value = {
+ "Images": [
+ {
+ "InstanceId": "i-123456789",
+ "Name": "my-image",
+ }
+ ]
+ }
+
+ ec2_ami.CreateImage.do_check_mode(module, connection, None)
+ module.exit_json.assert_called_with(
+ changed=False,
+ msg="Error registering image: AMI name is already in use by another AMI",
+ )
+
+
+def test_CreateImage_do_check_mode_with_change():
+ module = MagicMock()
+
+ module.params = {"name": "my-image"}
+ connection = MagicMock()
+ connection.describe_images.return_value = {"Images": []}
+
+ ec2_ami.CreateImage.do_check_mode(module, connection, None)
+ module.exit_json.assert_called_with(changed=True, msg="Would have created a AMI if not in check mode.")
+
+
+@patch(module_name + ".get_waiter")
+def test_CreateImage_wait(m_get_waiter):
+ connection = MagicMock()
+ m_waiter = MagicMock()
+ m_get_waiter.return_value = m_waiter
+
+ assert ec2_ami.CreateImage.wait(connection, wait_timeout=0, image_id=None) is None
+
+ ec2_ami.CreateImage.wait(connection, wait_timeout=600, image_id="ami-0c7a795306730b288")
+ assert m_waiter.wait.call_count == 1
+ m_waiter.wait.assert_called_with(
+ ImageIds=["ami-0c7a795306730b288"],
+ WaiterConfig={"Delay": 15, "MaxAttempts": 40},
+ )
+
+
+@patch(module_name + ".add_ec2_tags")
+@patch(module_name + ".get_image_by_id")
+def test_CreateImage_set_tags(m_get_image_by_id, m_add_ec2_tags):
+ connection = MagicMock()
+ module = MagicMock()
+
+ m_get_image_by_id.return_value = {
+ "ImageId": "ami-0c7a795306730b288",
+ "BlockDeviceMappings": [
+ {"DeviceName": "/dev/sda1", "Ebs": {"VolumeSize": "50"}},
+ {
+ "DeviceName": "/dev/sdm",
+ "Ebs": {"VolumeSize": "100", "SnapshotId": "snap-066877671789bd71b"},
+ },
+ {"DeviceName": "/dev/sda2"},
+ ],
+ }
+ tags = {}
+ ec2_ami.CreateImage.set_tags(connection, module, tags, image_id="ami-0c7a795306730b288")
+ assert m_add_ec2_tags.call_count == 0
+
+ tags = {"metro": "LaSalle"}
+ ec2_ami.CreateImage.set_tags(connection, module, tags, image_id="ami-0c7a795306730b288")
+ assert m_add_ec2_tags.call_count == 3
+ m_add_ec2_tags.assert_called_with(connection, module, "snap-066877671789bd71b", tags)
+
+
+def test_CreateInage_set_launch_permissions():
+ connection = MagicMock()
+ launch_permissions = {"user_ids": ["123456789012"], "group_names": ["foo", "bar"]}
+ image_id = "ami-0c7a795306730b288"
+ ec2_ami.CreateImage.set_launch_permissions(connection, launch_permissions, image_id)
+
+ assert connection.modify_image_attribute.call_count == 1
+ connection.modify_image_attribute.assert_called_with(
+ Attribute="LaunchPermission",
+ ImageId="ami-0c7a795306730b288",
+ LaunchPermission={"Add": [{"Group": "foo"}, {"Group": "bar"}, {"UserId": "123456789012"}]},
+ aws_retry=True,
+ )
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_ami_info.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_ami_info.py
new file mode 100644
index 000000000..a5abc77af
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_ami_info.py
@@ -0,0 +1,224 @@
+# (c) 2022 Red Hat Inc.
+
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import ANY
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import patch
+
+import botocore.exceptions
+import pytest
+
+from ansible.module_utils.common.dict_transformations import camel_dict_to_snake_dict
+
+from ansible_collections.amazon.aws.plugins.modules import ec2_ami_info
+
+module_name = "ansible_collections.amazon.aws.plugins.modules.ec2_ami_info"
+
+
+@pytest.fixture
+def ec2_client():
+ return MagicMock()
+
+
+@pytest.mark.parametrize(
+ "executable_users,filters,image_ids,owners,expected",
+ [
+ ([], {}, [], [], {}),
+ ([], {}, ["ami-1234567890"], [], {"ImageIds": ["ami-1234567890"]}),
+ ([], {}, [], ["1234567890"], {"Filters": [{"Name": "owner-id", "Values": ["1234567890"]}]}),
+ (
+ [],
+ {"owner-alias": "test_ami_owner"},
+ [],
+ ["1234567890"],
+ {
+ "Filters": [
+ {"Name": "owner-alias", "Values": ["test_ami_owner"]},
+ {"Name": "owner-id", "Values": ["1234567890"]},
+ ]
+ },
+ ),
+ ([], {"is-public": True}, [], [], {"Filters": [{"Name": "is-public", "Values": ["true"]}]}),
+ (["self"], {}, [], [], {"ExecutableUsers": ["self"]}),
+ ([], {}, [], ["self"], {"Owners": ["self"]}),
+ ],
+)
+def test_build_request_args(executable_users, filters, image_ids, owners, expected):
+ assert ec2_ami_info.build_request_args(executable_users, filters, image_ids, owners) == expected
+
+
+def test_get_images(ec2_client):
+ ec2_client.describe_images.return_value = {
+ "Images": [
+ {
+ "Architecture": "x86_64",
+ "BlockDeviceMappings": [
+ {
+ "DeviceName": "/dev/sda1",
+ "Ebs": {
+ "DeleteOnTermination": "True",
+ "Encrypted": "False",
+ "SnapshotId": "snap-0f00cba784af62428",
+ "VolumeSize": 10,
+ "VolumeType": "gp2",
+ },
+ }
+ ],
+ "ImageId": "ami-1234567890",
+ "ImageLocation": "1234567890/test-ami-uefi-boot",
+ "ImageType": "machine",
+ "Name": "test-ami-uefi-boot",
+ "OwnerId": "1234567890",
+ "PlatformDetails": "Linux/UNIX",
+ }
+ ],
+ }
+
+ request_args = {"ImageIds": ["ami-1234567890"]}
+
+ get_images_result = ec2_ami_info.get_images(ec2_client, request_args)
+
+ ec2_client.describe_images.call_count == 2
+ ec2_client.describe_images.assert_called_with(aws_retry=True, **request_args)
+ assert get_images_result == ec2_client.describe_images.return_value
+
+
+def test_get_image_attribute():
+ ec2_client = MagicMock()
+
+ ec2_client.describe_image_attribute.return_value = {
+ "ImageId": "ami-1234567890",
+ "LaunchPermissions": [{"UserId": "1234567890"}, {"UserId": "0987654321"}],
+ }
+
+ image_id = "ami-1234567890"
+
+ get_image_attribute_result = ec2_ami_info.get_image_attribute(ec2_client, image_id)
+
+ ec2_client.describe_image_attribute.call_count == 1
+ ec2_client.describe_image_attribute.assert_called_with(
+ aws_retry=True, Attribute="launchPermission", ImageId=image_id
+ )
+ assert len(get_image_attribute_result["LaunchPermissions"]) == 2
+
+
+@patch(module_name + ".get_image_attribute")
+@patch(module_name + ".get_images")
+def test_list_ec2_images(m_get_images, m_get_image_attribute):
+ module = MagicMock()
+
+ m_get_images.return_value = {
+ "Images": [
+ {
+ "Architecture": "x86_64",
+ "BlockDeviceMappings": [
+ {
+ "DeviceName": "/dev/sda1",
+ "Ebs": {
+ "DeleteOnTermination": "True",
+ "Encrypted": "False",
+ "SnapshotId": "snap-0f00cba784af62428",
+ "VolumeSize": 10,
+ "VolumeType": "gp2",
+ },
+ }
+ ],
+ "ImageId": "ami-1234567890",
+ "ImageLocation": "1234567890/test-ami-uefi-boot",
+ "ImageType": "machine",
+ "Name": "test-ami-uefi-boot",
+ "OwnerId": "1234567890",
+ "OwnerAlias": "test_ami_owner",
+ "PlatformDetails": "Linux/UNIX",
+ },
+ {
+ "Architecture": "x86_64",
+ "BlockDeviceMappings": [
+ {
+ "DeviceName": "/dev/sda1",
+ "Ebs": {
+ "DeleteOnTermination": "True",
+ "Encrypted": "False",
+ "SnapshotId": "snap-0f00cba784af62428",
+ "VolumeSize": 10,
+ "VolumeType": "gp2",
+ },
+ }
+ ],
+ "ImageId": "ami-1523498760",
+ "ImageLocation": "1523498760/test-ami-uefi-boot",
+ "ImageType": "machine",
+ "Name": "test-ami-uefi-boot",
+ "OwnerId": "1234567890",
+ "OwnerAlias": "test_ami_owner",
+ "PlatformDetails": "Linux/UNIX",
+ },
+ ],
+ }
+
+ m_get_image_attribute.return_value = {
+ "ImageId": "ami-1234567890",
+ "LaunchPermissions": [{"UserId": "1234567890"}, {"UserId": "0987654321"}],
+ }
+
+ images = m_get_images.return_value["Images"]
+ images = [camel_dict_to_snake_dict(image) for image in images]
+
+ request_args = {
+ "Filters": [
+ {"Name": "owner-alias", "Values": ["test_ami_owner"]},
+ {"Name": "owner-id", "Values": ["1234567890"]},
+ ]
+ }
+
+ # needed for `assert m_get_image_attribute.call_count == 2`
+ module.params = {"describe_image_attributes": True}
+
+ list_ec2_images_result = ec2_ami_info.list_ec2_images(ec2_client, module, request_args)
+
+ assert m_get_images.call_count == 1
+ m_get_images.assert_called_with(ec2_client, request_args)
+
+ assert m_get_image_attribute.call_count == 2
+ m_get_image_attribute.assert_has_calls(
+ [call(ec2_client, images[0]["image_id"])],
+ [call(ec2_client, images[1]["image_id"])],
+ )
+
+ assert len(list_ec2_images_result) == 2
+ assert list_ec2_images_result[0]["image_id"] == "ami-1234567890"
+ assert list_ec2_images_result[1]["image_id"] == "ami-1523498760"
+
+
+@patch(module_name + ".AnsibleAWSModule")
+def test_main_success(m_AnsibleAWSModule):
+ m_module = MagicMock()
+ m_AnsibleAWSModule.return_value = m_module
+
+ ec2_ami_info.main()
+
+ m_module.client.assert_called_with("ec2", retry_decorator=ANY)
+ m_module.exit_json.assert_called_with(images=[])
+
+
+def a_boto_exception():
+ return botocore.exceptions.UnknownServiceError(service_name="Whoops", known_service_names="Oula")
+
+
+def test_api_failure_get_images(ec2_client):
+ request_args = {}
+ ec2_client.describe_images.side_effect = a_boto_exception()
+
+ with pytest.raises(ec2_ami_info.AmiInfoFailure):
+ ec2_ami_info.get_images(ec2_client, request_args)
+
+
+def test_api_failure_get_image_attribute(ec2_client):
+ image_id = "ami-1234567890"
+ ec2_client.describe_image_attribute.side_effect = a_boto_exception()
+
+ with pytest.raises(ec2_ami_info.AmiInfoFailure):
+ ec2_ami_info.get_image_attribute(ec2_client, image_id)
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_eni_info.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_eni_info.py
new file mode 100644
index 000000000..d6323601d
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_eni_info.py
@@ -0,0 +1,108 @@
+# (c) 2022 Red Hat Inc.
+
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import patch
+
+import pytest
+
+from ansible_collections.amazon.aws.plugins.modules import ec2_eni_info
+
+module_name = "ansible_collections.amazon.aws.plugins.modules.ec2_eni_info"
+
+
+@pytest.mark.parametrize(
+ "eni_id,filters,expected", [("", {}, {}), ("eni-1234567890", {}, {"NetworkInterfaceIds": ["eni-1234567890"]})]
+)
+def test_build_request_args(eni_id, filters, expected):
+ assert ec2_eni_info.build_request_args(eni_id, filters) == expected
+
+
+def test_get_network_interfaces():
+ connection = MagicMock()
+ module = MagicMock()
+
+ connection.describe_network_interfaces.return_value = {
+ "NetworkInterfaces": [
+ {
+ "AvailabilityZone": "us-east-2b",
+ "Description": "",
+ "NetworkInterfaceId": "eni-1234567890",
+ "PrivateIpAddresses": [{"Primary": "True", "PrivateIpAddress": "11.22.33.44"}],
+ "RequesterManaged": False,
+ "SourceDestCheck": True,
+ "Status": "available",
+ "SubnetId": "subnet-07d906b8358869bda",
+ "TagSet": [],
+ "VpcId": "vpc-0cb60952be96c9cd8",
+ }
+ ]
+ }
+
+ request_args = {"NetworkInterfaceIds": ["eni-1234567890"]}
+
+ network_interfaces_result = ec2_eni_info.get_network_interfaces(connection, module, request_args)
+
+    assert connection.describe_network_interfaces.call_count == 1
+ connection.describe_network_interfaces.assert_called_with(aws_retry=True, **request_args)
+ assert len(network_interfaces_result["NetworkInterfaces"]) == 1
+
+
+@patch(module_name + ".get_network_interfaces")
+def test_list_eni(m_get_network_interfaces):
+ connection = MagicMock()
+ module = MagicMock()
+
+ m_get_network_interfaces.return_value = {
+ "NetworkInterfaces": [
+ {
+ "AvailabilityZone": "us-east-2b",
+ "Description": "",
+ "NetworkInterfaceId": "eni-1234567890",
+ "PrivateIpAddresses": [{"Primary": "True", "PrivateIpAddress": "11.22.33.44"}],
+ "RequesterManaged": False,
+ "SourceDestCheck": True,
+ "Status": "available",
+ "SubnetId": "subnet-07d906b8358869bda",
+ "TagSet": [],
+ "VpcId": "vpc-0cb60952be96c9cd8",
+ },
+ {
+ "AvailabilityZone": "us-east-2b",
+ "Description": "",
+ "NetworkInterfaceId": "eni-0987654321",
+ "PrivateIpAddresses": [{"Primary": "True", "PrivateIpAddress": "11.22.33.44"}],
+ "RequesterManaged": False,
+ "SourceDestCheck": True,
+ "Status": "available",
+ "SubnetId": "subnet-07d906b8358869bda",
+ "TagSet": [
+ {"Key": "Name", "Value": "my-test-eni-name"},
+ ],
+ "VpcId": "vpc-0cb60952be96c9cd8",
+ },
+ ]
+ }
+
+ request_args = {"Filters": [{"Name": "owner-id", "Values": ["1234567890"]}]}
+
+ camel_network_interfaces = ec2_eni_info.list_eni(connection, module, request_args)
+
+    assert m_get_network_interfaces.call_count == 1
+ m_get_network_interfaces.assert_has_calls(
+ [
+ call(connection, module, request_args),
+ ]
+ )
+ assert len(camel_network_interfaces) == 2
+
+ assert camel_network_interfaces[0]["id"] == "eni-1234567890"
+ assert camel_network_interfaces[0]["tags"] == {}
+ assert camel_network_interfaces[0].get("name") is None
+
+ assert camel_network_interfaces[1]["id"] == "eni-0987654321"
+ assert camel_network_interfaces[1]["tags"] == {"Name": "my-test-eni-name"}
+ assert camel_network_interfaces[1]["name"] == "my-test-eni-name"
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_import_image.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_import_image.py
new file mode 100644
index 000000000..6830fe358
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_import_image.py
@@ -0,0 +1,224 @@
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import ANY
+from unittest.mock import MagicMock
+from unittest.mock import patch
+
+import pytest
+
+from ansible_collections.amazon.aws.plugins.modules import ec2_import_image
+from ansible_collections.amazon.aws.plugins.modules import ec2_import_image_info
+
+module_name = "ansible_collections.amazon.aws.plugins.modules.ec2_import_image"
+module_name_info = "ansible_collections.amazon.aws.plugins.modules.ec2_import_image_info"
+utils = "ansible_collections.amazon.aws.plugins.module_utils.ec2"
+
+expected_result = {
+ "import_task_id": "import-ami-0c207d759080a3dff",
+ "progress": "19",
+ "snapshot_details": [
+ {
+ "disk_image_size": 26843545600.0,
+ "format": "RAW",
+ "status": "active",
+ "user_bucket": {"s3_bucket": "clone-vm-s3-bucket", "s3_key": "clone-vm-s3-bucket/ubuntu-vm-clone.raw"},
+ }
+ ],
+ "status": "active",
+ "status_message": "converting",
+ "tags": {"Name": "clone-vm-import-image"},
+ "task_name": "clone-vm-import-image",
+}
+
+describe_import_image_tasks = [
+ {
+ "ImportTaskId": "import-ami-0c207d759080a3dff",
+ "Progress": "19",
+ "SnapshotDetails": [
+ {
+ "DiskImageSize": 26843545600.0,
+ "Format": "RAW",
+ "Status": "active",
+ "UserBucket": {"S3Bucket": "clone-vm-s3-bucket", "S3Key": "clone-vm-s3-bucket/ubuntu-vm-clone.raw"},
+ }
+ ],
+ "Status": "active",
+ "StatusMessage": "converting",
+ "Tags": [{"Key": "Name", "Value": "clone-vm-import-image"}],
+ }
+]
+
+
+@pytest.fixture
+def paginate():
+ # Create a MagicMock for the paginate object
+ paginate_mock = MagicMock()
+
+ return paginate_mock
+
+
+@pytest.fixture
+def conn_paginator(paginate):
+ conn_paginator_mock = MagicMock()
+ conn_paginator_mock.paginate.return_value = paginate
+ return conn_paginator_mock
+
+
+@pytest.fixture
+def client(conn_paginator):
+ client_mock = MagicMock()
+
+ # Configure the client.get_paginator to return the conn_paginator
+ client_mock.get_paginator.return_value = conn_paginator
+
+ return client_mock
+
+
+@pytest.fixture
+def module():
+ # Create a MagicMock for the module object
+ module_mock = MagicMock()
+ module_mock.params = {
+ "task_name": "clone-vm-import-image",
+ "disk_containers": [
+ {
+ "format": "raw",
+ "user_bucket": {"s3_bucket": "clone-vm-s3-bucket", "s3_key": "clone-vm-s3-bucket/ubuntu-vm-clone.raw"},
+ }
+ ],
+ }
+ module_mock.check_mode = False
+
+ return module_mock
+
+
+@pytest.mark.parametrize(
+ "side_effects, expected_result",
+ [
+ (
+ [{"ImportImageTasks": []}, {"ImportImageTasks": describe_import_image_tasks}],
+ {"changed": True, "import_image": expected_result},
+ ),
+ (
+ [{"ImportImageTasks": describe_import_image_tasks}, {"ImportImageTasks": describe_import_image_tasks}],
+ {
+ "changed": False,
+ "msg": "An import task with the specified name already exists",
+ "import_image": expected_result,
+ },
+ ),
+ ],
+)
+def test_present_no_check_mode(client, module, paginate, side_effects, expected_result):
+ paginate.build_full_result.side_effect = side_effects
+ module.exit_json.side_effect = SystemExit(1)
+
+ with patch(utils + ".helper_describe_import_image_tasks", return_value=paginate):
+ with pytest.raises(SystemExit):
+ ec2_import_image.present(client, module)
+
+ module.exit_json.assert_called_with(**expected_result)
+
+
+@pytest.mark.parametrize(
+ "side_effects, expected_result",
+ [
+ (
+ [{"ImportImageTasks": []}, {"ImportImageTasks": describe_import_image_tasks}],
+ {"changed": True, "msg": "Would have created the import task if not in check mode"},
+ ),
+ (
+ [{"ImportImageTasks": describe_import_image_tasks}, {"ImportImageTasks": describe_import_image_tasks}],
+ {
+ "changed": False,
+ "msg": "An import task with the specified name already exists",
+ "import_image": expected_result,
+ },
+ ),
+ ],
+)
+def test_present_check_mode(client, module, paginate, side_effects, expected_result):
+ paginate.build_full_result.side_effect = side_effects
+ module.check_mode = True
+ module.exit_json.side_effect = SystemExit(1)
+
+ with patch(utils + ".helper_describe_import_image_tasks", return_value=paginate):
+ with pytest.raises(SystemExit):
+ ec2_import_image.present(client, module)
+
+ module.exit_json.assert_called_with(**expected_result)
+
+
+@pytest.mark.parametrize(
+ "side_effect, expected_result",
+ [
+ (
+ [
+ {"ImportImageTasks": []},
+ ],
+ {
+ "changed": False,
+ "msg": "The specified import task does not exist or it cannot be cancelled",
+ "import_image": {},
+ },
+ ),
+ (
+ [
+ {"ImportImageTasks": describe_import_image_tasks},
+ ],
+ {"changed": True, "import_image": expected_result},
+ ),
+ ],
+)
+def test_absent_no_check_mode(client, module, paginate, side_effect, expected_result):
+ paginate.build_full_result.side_effect = side_effect
+ module.exit_json.side_effect = SystemExit(1)
+
+ with patch(utils + ".helper_describe_import_image_tasks", return_value=paginate):
+ with pytest.raises(SystemExit):
+ ec2_import_image.absent(client, module)
+
+ module.exit_json.assert_called_with(**expected_result)
+
+
+@pytest.mark.parametrize(
+ "side_effect, expected_result",
+ [
+ (
+ [
+ {"ImportImageTasks": []},
+ ],
+ {
+ "changed": False,
+ "msg": "The specified import task does not exist or it cannot be cancelled",
+ "import_image": {},
+ },
+ ),
+ (
+ [
+ {"ImportImageTasks": describe_import_image_tasks},
+ ],
+ {"changed": True, "import_image": expected_result},
+ ),
+ ],
+)
+def test_absent_check_mode(client, module, paginate, side_effect, expected_result):
+ paginate.build_full_result.side_effect = side_effect
+ module.exit_json.side_effect = SystemExit(1)
+
+ with patch(utils + ".helper_describe_import_image_tasks", return_value=paginate):
+ with pytest.raises(SystemExit):
+ ec2_import_image.absent(client, module)
+
+ module.exit_json.assert_called_with(**expected_result)
+
+
+@patch(module_name_info + ".AnsibleAWSModule")
+def test_main_success(m_AnsibleAWSModule):
+ m_module = MagicMock()
+ m_AnsibleAWSModule.return_value = m_module
+
+ ec2_import_image_info.main()
+
+ m_module.client.assert_called_with("ec2", retry_decorator=ANY)
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_key.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_key.py
index 2660ced63..cbcf02588 100644
--- a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_key.py
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_key.py
@@ -1,17 +1,17 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+import copy
+import datetime
+from unittest.mock import ANY
from unittest.mock import MagicMock
from unittest.mock import patch
-from unittest.mock import call, ANY
-import pytest
import botocore
-import datetime
+import pytest
from dateutil.tz import tzutc
-from ansible.module_utils._text import to_bytes
-from ansible_collections.amazon.aws.plugins.module_utils.core import is_boto3_error_code
+from ansible.module_utils._text import to_bytes
from ansible_collections.amazon.aws.plugins.modules import ec2_key
@@ -19,47 +19,41 @@ module_name = "ansible_collections.amazon.aws.plugins.modules.ec2_key"
def raise_botocore_exception_clienterror(action):
-
params = {
- 'Error': {
- 'Code': 1,
- 'Message': 'error creating key'
- },
- 'ResponseMetadata': {
- 'RequestId': '01234567-89ab-cdef-0123-456789abcdef'
- }
+ "Error": {"Code": 1, "Message": "error creating key"},
+ "ResponseMetadata": {"RequestId": "01234567-89ab-cdef-0123-456789abcdef"},
}
- if action == 'create_key_pair':
- params['Error']['Message'] = 'error creating key'
+ if action == "create_key_pair":
+ params["Error"]["Message"] = "error creating key"
- elif action == 'describe_key_pair':
- params['Error']['Code'] = 'InvalidKeyPair.NotFound'
- params['Error']['Message'] = 'The key pair does not exist'
+ elif action == "describe_key_pair":
+ params["Error"]["Code"] = "InvalidKeyPair.NotFound"
+ params["Error"]["Message"] = "The key pair does not exist"
- elif action == 'import_key_pair':
- params['Error']['Message'] = 'error importing key'
+ elif action == "import_key_pair":
+ params["Error"]["Message"] = "error importing key"
- elif action == 'delete_key_pair':
- params['Error']['Message'] = 'error deleting key'
+ elif action == "delete_key_pair":
+ params["Error"]["Message"] = "error deleting key"
return botocore.exceptions.ClientError(params, action)
def test__import_key_pair():
ec2_client = MagicMock()
- name = 'my_keypair'
+ name = "my_keypair"
key_material = "ssh-rsa AAAAB3NzaC1yc2EAA email@example.com"
expected_params = {
- 'KeyName': name,
- 'PublicKeyMaterial': to_bytes(key_material),
+ "KeyName": name,
+ "PublicKeyMaterial": to_bytes(key_material),
}
ec2_client.import_key_pair.return_value = {
- 'KeyFingerprint': 'd7:ff:a6:63:18:64:9c:57:a1:ee:ca:a4:ad:c2:81:62',
- 'KeyName': 'my_keypair',
- 'KeyPairId': 'key-012345678905a208d'
+ "KeyFingerprint": "d7:ff:a6:63:18:64:9c:57:a1:ee:ca:a4:ad:c2:81:62",
+ "KeyName": "my_keypair",
+ "KeyPairId": "key-012345678905a208d",
}
result = ec2_key._import_key_pair(ec2_client, name, key_material)
@@ -71,22 +65,21 @@ def test__import_key_pair():
def test_api_failure__import_key_pair():
ec2_client = MagicMock()
- name = 'my_keypair'
+ name = "my_keypair"
key_material = "ssh-rsa AAAAB3NzaC1yc2EAA email@example.com"
expected_params = {
- 'KeyName': name,
- 'PublicKeyMaterial': to_bytes(key_material),
+ "KeyName": name,
+ "PublicKeyMaterial": to_bytes(key_material),
}
- ec2_client.import_key_pair.side_effect = raise_botocore_exception_clienterror('import_key_pair')
+ ec2_client.import_key_pair.side_effect = raise_botocore_exception_clienterror("import_key_pair")
with pytest.raises(ec2_key.Ec2KeyFailure):
ec2_key._import_key_pair(ec2_client, name, key_material)
def test_extract_key_data_describe_key_pairs():
-
key = {
"CreateTime": datetime.datetime(2022, 9, 15, 20, 10, 15, tzinfo=tzutc()),
"KeyFingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
@@ -96,66 +89,61 @@ def test_extract_key_data_describe_key_pairs():
}
key_type = "rsa"
-
+ file_name = MagicMock()
expected_result = {
"name": "my_keypair",
"fingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
"id": "key-043046ef2a9a80b56",
"tags": {},
- "type": "rsa"
+ "type": "rsa",
}
- result = ec2_key.extract_key_data(key, key_type)
+ result = ec2_key.extract_key_data(key, key_type, file_name)
assert result == expected_result
def test_extract_key_data_create_key_pair():
-
key = {
- 'KeyFingerprint': '11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa',
- 'KeyName': 'my_keypair',
- 'KeyPairId': 'key-043046ef2a9a80b56'
+ "KeyFingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
+ "KeyName": "my_keypair",
+ "KeyPairId": "key-043046ef2a9a80b56",
}
key_type = "rsa"
-
+ file_name = MagicMock()
expected_result = {
"name": "my_keypair",
"fingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
"id": "key-043046ef2a9a80b56",
"tags": {},
- "type": "rsa"
+ "type": "rsa",
}
- result = ec2_key.extract_key_data(key, key_type)
+ result = ec2_key.extract_key_data(key, key_type, file_name)
assert result == expected_result
-@patch(module_name + '.delete_key_pair')
-@patch(module_name + '._import_key_pair')
-@patch(module_name + '.find_key_pair')
+@patch(module_name + ".delete_key_pair")
+@patch(module_name + "._import_key_pair")
+@patch(module_name + ".find_key_pair")
def test_get_key_fingerprint(m_find_key_pair, m_import_key_pair, m_delete_key_pair):
-
module = MagicMock()
ec2_client = MagicMock()
+ file_name = MagicMock()
m_find_key_pair.return_value = None
m_import_key_pair.return_value = {
- 'KeyFingerprint': 'd7:ff:a6:63:18:64:9c:57:a1:ee:ca:a4:ad:c2:81:62',
- 'KeyName': 'my_keypair',
- 'KeyPairId': 'key-043046ef2a9a80b56'
+ "KeyFingerprint": "d7:ff:a6:63:18:64:9c:57:a1:ee:ca:a4:ad:c2:81:62",
+ "KeyName": "my_keypair",
+ "KeyPairId": "key-043046ef2a9a80b56",
}
- m_delete_key_pair.return_value = {
- 'changed': True,
- 'key': None,
- 'msg': 'key deleted'
- }
+ m_delete_key_pair.return_value = {"changed": True, "key": None, "msg": "key deleted"}
- expected_result = 'd7:ff:a6:63:18:64:9c:57:a1:ee:ca:a4:ad:c2:81:62'
+ expected_result = "d7:ff:a6:63:18:64:9c:57:a1:ee:ca:a4:ad:c2:81:62"
key_material = "ssh-rsa AAAAB3NzaC1yc2EAA email@example.com"
@@ -169,17 +157,17 @@ def test_get_key_fingerprint(m_find_key_pair, m_import_key_pair, m_delete_key_pa
def test_find_key_pair():
ec2_client = MagicMock()
- name = 'my_keypair'
+ name = "my_keypair"
ec2_client.describe_key_pairs.return_value = {
- 'KeyPairs': [
+ "KeyPairs": [
{
- 'CreateTime': datetime.datetime(2022, 9, 15, 20, 10, 15, tzinfo=tzutc()),
- 'KeyFingerprint': '11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa',
- 'KeyName': 'my_keypair',
- 'KeyPairId': 'key-043046ef2a9a80b56',
- 'KeyType': 'rsa',
- 'Tags': []
+ "CreateTime": datetime.datetime(2022, 9, 15, 20, 10, 15, tzinfo=tzutc()),
+ "KeyFingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
+ "KeyName": "my_keypair",
+ "KeyPairId": "key-043046ef2a9a80b56",
+ "KeyType": "rsa",
+ "Tags": [],
}
],
}
@@ -192,7 +180,7 @@ def test_find_key_pair():
def test_api_failure_find_key_pair():
ec2_client = MagicMock()
- name = 'non_existing_keypair'
+ name = "non_existing_keypair"
ec2_client.describe_key_pairs.side_effect = botocore.exceptions.BotoCoreError
@@ -202,9 +190,9 @@ def test_api_failure_find_key_pair():
def test_invalid_key_pair_find_key_pair():
ec2_client = MagicMock()
- name = 'non_existing_keypair'
+ name = "non_existing_keypair"
- ec2_client.describe_key_pairs.side_effect = raise_botocore_exception_clienterror('describe_key_pair')
+ ec2_client.describe_key_pairs.side_effect = raise_botocore_exception_clienterror("describe_key_pair")
result = ec2_key.find_key_pair(ec2_client, name)
@@ -213,11 +201,11 @@ def test_invalid_key_pair_find_key_pair():
def test__create_key_pair():
ec2_client = MagicMock()
- name = 'my_keypair'
+ name = "my_keypair"
tag_spec = None
key_type = None
- expected_params = {'KeyName': name}
+ expected_params = {"KeyName": name}
ec2_client.create_key_pair.return_value = {
"KeyFingerprint": "d7:ff:a6:63:18:64:9c:57:a1:ee:ca:a4:ad:c2:81:62",
@@ -239,33 +227,33 @@ def test__create_key_pair():
def test_api_failure__create_key_pair():
ec2_client = MagicMock()
- name = 'my_keypair'
+ name = "my_keypair"
tag_spec = None
key_type = None
- ec2_client.create_key_pair.side_effect = raise_botocore_exception_clienterror('create_key_pair')
+ ec2_client.create_key_pair.side_effect = raise_botocore_exception_clienterror("create_key_pair")
with pytest.raises(ec2_key.Ec2KeyFailure):
ec2_key._create_key_pair(ec2_client, name, tag_spec, key_type)
-@patch(module_name + '.extract_key_data')
-@patch(module_name + '._import_key_pair')
+@patch(module_name + ".extract_key_data")
+@patch(module_name + "._import_key_pair")
def test_create_new_key_pair_key_material(m_import_key_pair, m_extract_key_data):
module = MagicMock()
ec2_client = MagicMock()
- name = 'my_keypair'
+ name = "my_keypair"
key_material = "ssh-rsa AAAAB3NzaC1yc2EAA email@example.com"
- key_type = 'rsa'
+ key_type = "rsa"
tags = None
-
+ file_name = MagicMock()
module.check_mode = False
m_import_key_pair.return_value = {
- 'KeyFingerprint': 'd7:ff:a6:63:18:64:9c:57:a1:ee:ca:a4:ad:c2:81:62',
- 'KeyName': 'my_keypair',
- 'KeyPairId': 'key-012345678905a208d'
+ "KeyFingerprint": "d7:ff:a6:63:18:64:9c:57:a1:ee:ca:a4:ad:c2:81:62",
+ "KeyName": "my_keypair",
+ "KeyPairId": "key-012345678905a208d",
}
m_extract_key_data.return_value = {
@@ -273,35 +261,36 @@ def test_create_new_key_pair_key_material(m_import_key_pair, m_extract_key_data)
"fingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
"id": "key-043046ef2a9a80b56",
"tags": {},
- "type": "rsa"
+ "type": "rsa",
}
- expected_result = {'changed': True, 'key': m_extract_key_data.return_value, 'msg': 'key pair created'}
+ expected_result = {"changed": True, "key": m_extract_key_data.return_value, "msg": "key pair created"}
- result = ec2_key.create_new_key_pair(ec2_client, name, key_material, key_type, tags, module.check_mode)
+ result = ec2_key.create_new_key_pair(ec2_client, name, key_material, key_type, tags, file_name, module.check_mode)
assert result == expected_result
assert m_import_key_pair.call_count == 1
assert m_extract_key_data.call_count == 1
-@patch(module_name + '.extract_key_data')
-@patch(module_name + '._create_key_pair')
+@patch(module_name + ".extract_key_data")
+@patch(module_name + "._create_key_pair")
def test_create_new_key_pair_no_key_material(m_create_key_pair, m_extract_key_data):
module = MagicMock()
ec2_client = MagicMock()
- name = 'my_keypair'
- key_type = 'rsa'
+ name = "my_keypair"
+ key_type = "rsa"
key_material = None
tags = None
-
+ file_name = MagicMock()
+    # file_name is forwarded to extract_key_data via create_new_key_pair
module.check_mode = False
m_create_key_pair.return_value = {
- 'KeyFingerprint': 'd7:ff:a6:63:18:64:9c:57:a1:ee:ca:a4:ad:c2:81:62',
- 'KeyName': 'my_keypair',
- 'KeyPairId': 'key-012345678905a208d'
+ "KeyFingerprint": "d7:ff:a6:63:18:64:9c:57:a1:ee:ca:a4:ad:c2:81:62",
+ "KeyName": "my_keypair",
+ "KeyPairId": "key-012345678905a208d",
}
m_extract_key_data.return_value = {
@@ -309,12 +298,12 @@ def test_create_new_key_pair_no_key_material(m_create_key_pair, m_extract_key_da
"fingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
"id": "key-043046ef2a9a80b56",
"tags": {},
- "type": "rsa"
+ "type": "rsa",
}
- expected_result = {'changed': True, 'key': m_extract_key_data.return_value, 'msg': 'key pair created'}
+ expected_result = {"changed": True, "key": m_extract_key_data.return_value, "msg": "key pair created"}
- result = ec2_key.create_new_key_pair(ec2_client, name, key_material, key_type, tags, module.check_mode)
+ result = ec2_key.create_new_key_pair(ec2_client, name, key_material, key_type, tags, file_name, module.check_mode)
assert result == expected_result
assert m_create_key_pair.call_count == 1
@@ -324,7 +313,7 @@ def test_create_new_key_pair_no_key_material(m_create_key_pair, m_extract_key_da
def test__delete_key_pair():
ec2_client = MagicMock()
- key_name = 'my_keypair'
+ key_name = "my_keypair"
ec2_key._delete_key_pair(ec2_client, key_name)
assert ec2_client.delete_key_pair.call_count == 1
@@ -333,23 +322,25 @@ def test__delete_key_pair():
def test_api_failure__delete_key_pair():
ec2_client = MagicMock()
- name = 'my_keypair'
+ name = "my_keypair"
- ec2_client.delete_key_pair.side_effect = raise_botocore_exception_clienterror('delete_key_pair')
+ ec2_client.delete_key_pair.side_effect = raise_botocore_exception_clienterror("delete_key_pair")
with pytest.raises(ec2_key.Ec2KeyFailure):
ec2_key._delete_key_pair(ec2_client, name)
-@patch(module_name + '.extract_key_data')
-@patch(module_name + '._import_key_pair')
-@patch(module_name + '.delete_key_pair')
-@patch(module_name + '.get_key_fingerprint')
-def test_update_key_pair_by_key_material_update_needed(m_get_key_fingerprint, m_delete_key_pair, m__import_key_pair, m_extract_key_data):
+@patch(module_name + ".extract_key_data")
+@patch(module_name + "._import_key_pair")
+@patch(module_name + ".delete_key_pair")
+@patch(module_name + ".get_key_fingerprint")
+def test_update_key_pair_by_key_material_update_needed(
+ m_get_key_fingerprint, m_delete_key_pair, m__import_key_pair, m_extract_key_data
+):
module = MagicMock()
ec2_client = MagicMock()
- name = 'my_keypair'
+ name = "my_keypair"
key_material = "ssh-rsa AAAAB3NzaC1yc2EAA email@example.com"
tag_spec = None
key = {
@@ -358,16 +349,15 @@ def test_update_key_pair_by_key_material_update_needed(m_get_key_fingerprint, m_
"KeyPairId": "key-043046ef2a9a80b56",
"Tags": {},
}
-
module.check_mode = False
- m_get_key_fingerprint.return_value = 'd7:ff:a6:63:18:64:9c:57:a1:ee:ca:a4:ad:c2:81:62'
+ m_get_key_fingerprint.return_value = "d7:ff:a6:63:18:64:9c:57:a1:ee:ca:a4:ad:c2:81:62"
m_delete_key_pair.return_value = None
m__import_key_pair.return_value = {
- 'KeyFingerprint': '11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa',
- 'KeyName': 'my_keypair',
- 'KeyPairId': 'key-043046ef2a9a80b56',
- 'Tags': {},
+ "KeyFingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
+ "KeyName": "my_keypair",
+ "KeyPairId": "key-043046ef2a9a80b56",
+ "Tags": {},
}
m_extract_key_data.return_value = {
"name": "my_keypair",
@@ -376,7 +366,7 @@ def test_update_key_pair_by_key_material_update_needed(m_get_key_fingerprint, m_
"tags": {},
}
- expected_result = {'changed': True, 'key': m_extract_key_data.return_value, 'msg': "key pair updated"}
+ expected_result = {"changed": True, "key": m_extract_key_data.return_value, "msg": "key pair updated"}
result = ec2_key.update_key_pair_by_key_material(module.check_mode, ec2_client, name, key, key_material, tag_spec)
@@ -407,7 +397,6 @@ def test_update_key_pair_by_key_material_key_exists(m_get_key_fingerprint, m_ext
"KeyPairId": key_id,
"Tags": {},
}
-
check_mode = False
m_get_key_fingerprint.return_value = key_fingerprint
m_extract_key_data.return_value = {
@@ -434,31 +423,31 @@ def test_update_key_pair_by_key_type_update_needed(m_delete_key_pair, m__create_
module = MagicMock()
ec2_client = MagicMock()
- name = 'my_keypair'
- key_type = 'rsa'
+ name = "my_keypair"
+ key_type = "rsa"
tag_spec = None
-
+ file_name = MagicMock()
module.check_mode = False
m_delete_key_pair.return_value = None
m__create_key_pair.return_value = {
- 'KeyFingerprint': '11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa',
- 'Name': 'my_keypair',
- 'Id': 'key-043046ef2a9a80b56',
- 'Tags': {},
- 'Type': 'rsa'
+ "KeyFingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
+ "Name": "my_keypair",
+ "Id": "key-043046ef2a9a80b56",
+ "Tags": {},
+ "Type": "rsa",
}
m_extract_key_data.return_value = {
"name": "my_keypair",
"fingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
"id": "key-043046ef2a9a80b56",
"tags": {},
- "type": "rsa"
+ "type": "rsa",
}
expected_result = {"changed": True, "key": m_extract_key_data.return_value, "msg": "key pair updated"}
- result = ec2_key.update_key_pair_by_key_type(module.check_mode, ec2_client, name, key_type, tag_spec)
+ result = ec2_key.update_key_pair_by_key_type(module.check_mode, ec2_client, name, key_type, tag_spec, file_name)
assert result == expected_result
assert m_delete_key_pair.call_count == 1
@@ -466,30 +455,30 @@ def test_update_key_pair_by_key_type_update_needed(m_delete_key_pair, m__create_
assert m_extract_key_data.call_count == 1
m_delete_key_pair.assert_called_with(module.check_mode, ec2_client, name, finish_task=False)
m__create_key_pair.assert_called_with(ec2_client, name, tag_spec, key_type)
- m_extract_key_data.assert_called_with(m__create_key_pair.return_value, key_type)
+ m_extract_key_data.assert_called_with(m__create_key_pair.return_value, key_type, file_name)
-@patch(module_name + '.update_key_pair_by_key_material')
+@patch(module_name + ".update_key_pair_by_key_material")
def test_handle_existing_key_pair_update_key_matrial_with_force(m_update_key_pair_by_key_material):
module = MagicMock()
ec2_client = MagicMock()
- name = 'my_keypair'
+ name = "my_keypair"
key = {
"KeyName": "my_keypair",
"KeyFingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
"KeyPairId": "key-043046ef2a9a80b56",
"Tags": {},
- "KeyType": "rsa"
+ "KeyType": "rsa",
}
module.params = {
- 'key_material': "ssh-rsa AAAAB3NzaC1yc2EAA email@example.com",
- 'force': True,
- 'key_type': 'rsa',
- 'tags': None,
- 'purge_tags': True,
- 'tag_spec': None
+ "key_material": "ssh-rsa AAAAB3NzaC1yc2EAA email@example.com",
+ "force": True,
+ "key_type": "rsa",
+ "tags": None,
+ "purge_tags": True,
+ "tag_spec": None,
}
key_data = {
@@ -499,9 +488,9 @@ def test_handle_existing_key_pair_update_key_matrial_with_force(m_update_key_pai
"tags": {},
}
- m_update_key_pair_by_key_material.return_value = {'changed': True, 'key': key_data, 'msg': "key pair updated"}
+ m_update_key_pair_by_key_material.return_value = {"changed": True, "key": key_data, "msg": "key pair updated"}
- expected_result = {'changed': True, 'key': key_data, 'msg': "key pair updated"}
+ expected_result = {"changed": True, "key": key_data, "msg": "key pair updated"}
result = ec2_key.handle_existing_key_pair_update(module, ec2_client, name, key)
@@ -509,27 +498,27 @@ def test_handle_existing_key_pair_update_key_matrial_with_force(m_update_key_pai
assert m_update_key_pair_by_key_material.call_count == 1
-@patch(module_name + '.update_key_pair_by_key_type')
+@patch(module_name + ".update_key_pair_by_key_type")
def test_handle_existing_key_pair_update_key_type(m_update_key_pair_by_key_type):
module = MagicMock()
ec2_client = MagicMock()
- name = 'my_keypair'
+ name = "my_keypair"
key = {
"KeyName": "my_keypair",
"KeyFingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
"KeyPairId": "key-043046ef2a9a80b56",
"Tags": {},
- "KeyType": "ed25519"
+ "KeyType": "ed25519",
}
module.params = {
- 'key_material': "ssh-rsa AAAAB3NzaC1yc2EAA email@example.com",
- 'force': False,
- 'key_type': 'rsa',
- 'tags': None,
- 'purge_tags': True,
- 'tag_spec': None
+ "key_material": "ssh-rsa AAAAB3NzaC1yc2EAA email@example.com",
+ "force": False,
+ "key_type": "rsa",
+ "tags": None,
+ "purge_tags": True,
+ "tag_spec": None,
}
key_data = {
@@ -539,9 +528,9 @@ def test_handle_existing_key_pair_update_key_type(m_update_key_pair_by_key_type)
"tags": {},
}
- m_update_key_pair_by_key_type.return_value = {'changed': True, 'key': key_data, 'msg': "key pair updated"}
+ m_update_key_pair_by_key_type.return_value = {"changed": True, "key": key_data, "msg": "key pair updated"}
- expected_result = {'changed': True, 'key': key_data, 'msg': "key pair updated"}
+ expected_result = {"changed": True, "key": key_data, "msg": "key pair updated"}
result = ec2_key.handle_existing_key_pair_update(module, ec2_client, name, key)
@@ -549,27 +538,27 @@ def test_handle_existing_key_pair_update_key_type(m_update_key_pair_by_key_type)
assert m_update_key_pair_by_key_type.call_count == 1
-@patch(module_name + '.extract_key_data')
+@patch(module_name + ".extract_key_data")
def test_handle_existing_key_pair_else(m_extract_key_data):
module = MagicMock()
ec2_client = MagicMock()
- name = 'my_keypair'
+ name = "my_keypair"
key = {
"KeyName": "my_keypair",
"KeyFingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
"KeyPairId": "key-043046ef2a9a80b56",
"Tags": {},
- "KeyType": "rsa"
+ "KeyType": "rsa",
}
module.params = {
- 'key_material': "ssh-rsa AAAAB3NzaC1yc2EAA email@example.com",
- 'force': False,
- 'key_type': 'rsa',
- 'tags': None,
- 'purge_tags': True,
- 'tag_spec': None
+ "key_material": "ssh-rsa AAAAB3NzaC1yc2EAA email@example.com",
+ "force": False,
+ "key_type": "rsa",
+ "tags": None,
+ "purge_tags": True,
+ "tag_spec": None,
}
m_extract_key_data.return_value = {
@@ -577,7 +566,7 @@ def test_handle_existing_key_pair_else(m_extract_key_data):
"fingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
"id": "key-043046ef2a9a80b56",
"tags": {},
- "type": "rsa"
+ "type": "rsa",
}
expected_result = {"changed": False, "key": m_extract_key_data.return_value, "msg": "key pair already exists"}
@@ -588,55 +577,53 @@ def test_handle_existing_key_pair_else(m_extract_key_data):
assert m_extract_key_data.call_count == 1
-@patch(module_name + '._delete_key_pair')
-@patch(module_name + '.find_key_pair')
-def test_delete_key_pair_key_exists(m_find_key_pair, m_delete_key_pair):
+@patch(module_name + "._delete_key_pair")
+@patch(module_name + ".find_key_pair")
+def test_delete_key_pair_key_exists(m_find_key_pair, m_delete_key_pair, tmp_path):
module = MagicMock()
ec2_client = MagicMock()
- name = 'my_keypair'
-
+ name = "my_keypair"
+ file_name = tmp_path / "private_key_data.pem"
module.check_mode = False
m_find_key_pair.return_value = {
- 'KeyPairs': [
+ "KeyPairs": [
{
- 'CreateTime': datetime.datetime(2022, 9, 15, 20, 10, 15, tzinfo=tzutc()),
- 'KeyFingerprint': '11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa',
- 'KeyName': 'my_keypair',
- 'KeyPairId': 'key-043046ef2a9a80b56',
- 'KeyType': 'rsa',
- 'Tags': []
+ "CreateTime": datetime.datetime(2022, 9, 15, 20, 10, 15, tzinfo=tzutc()),
+ "KeyFingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
+ "KeyName": "my_keypair",
+ "KeyPairId": "key-043046ef2a9a80b56",
+ "KeyType": "rsa",
+ "Tags": [],
}
],
}
- expected_result = {'changed': True, 'key': None, 'msg': 'key deleted'}
-
result = ec2_key.delete_key_pair(module.check_mode, ec2_client, name)
assert m_find_key_pair.call_count == 1
m_find_key_pair.assert_called_with(ec2_client, name)
assert m_delete_key_pair.call_count == 1
m_delete_key_pair.assert_called_with(ec2_client, name)
- assert result == expected_result
+ assert result == {"changed": True, "key": None, "msg": "key deleted"}
-@patch(module_name + '._delete_key_pair')
-@patch(module_name + '.find_key_pair')
+@patch(module_name + "._delete_key_pair")
+@patch(module_name + ".find_key_pair")
def test_delete_key_pair_key_not_exist(m_find_key_pair, m_delete_key_pair):
module = MagicMock()
ec2_client = MagicMock()
- name = 'my_keypair'
-
+ name = "my_keypair"
+ file_name = "non_existing_file_path"
module.check_mode = False
m_find_key_pair.return_value = None
- expected_result = {'key': None, 'msg': 'key did not exist'}
+ expected_result = {"key": None, "msg": "key did not exist"}
- result = ec2_key.delete_key_pair(module.check_mode, ec2_client, name)
+ result = ec2_key.delete_key_pair(module.check_mode, ec2_client, name, file_name)
assert m_find_key_pair.call_count == 1
m_find_key_pair.assert_called_with(ec2_client, name)
@@ -644,6 +631,24 @@ def test_delete_key_pair_key_not_exist(m_find_key_pair, m_delete_key_pair):
assert result == expected_result
+def test__write_private_key(tmp_path):
+ key_data = {
+ "name": "my_keypair",
+ "fingerprint": "11:12:13:14:bb:26:85:b2:e8:39:27:bc:ee:aa:ff:ee:dd:cc:bb:aa",
+ "id": "key-043046ef2a9a80b56",
+ "tags": {},
+ "type": "rsa",
+ "private_key": "ABCDEFGH",
+ }
+ file_name = tmp_path / "id_rsa_key"
+ saved_key_data = copy.deepcopy(key_data)
+ result = ec2_key._write_private_key(key_data, str(file_name))
+
+ assert "private_key" not in result.keys()
+ del saved_key_data["private_key"]
+ assert saved_key_data == result
+
+
@patch(module_name + ".AnsibleAWSModule")
def test_main_success(m_AnsibleAWSModule):
m_module = MagicMock()
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_metadata_facts.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_metadata_facts.py
new file mode 100644
index 000000000..23ba85003
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_metadata_facts.py
@@ -0,0 +1,101 @@
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+import gzip
+import io
+from unittest.mock import MagicMock
+from unittest.mock import patch
+
+import pytest
+
+from ansible_collections.amazon.aws.plugins.modules import ec2_metadata_facts
+
+module_name = "ansible_collections.amazon.aws.plugins.modules.ec2_metadata_facts"
+
+
+class FailJson(Exception):
+ pass
+
+
+@pytest.fixture()
+def ec2_instance():
+ module = MagicMock()
+ return ec2_metadata_facts.Ec2Metadata(module)
+
+
+@patch(module_name + ".fetch_url")
+def test__fetch_401(m_fetch_url, ec2_instance):
+ ec2_instance.module.fail_json.side_effect = FailJson()
+ m_fetch_url.return_value = (None, {"status": 401, "msg": "Oops"})
+ with pytest.raises(FailJson):
+ ec2_instance._fetch("http://169.254.169.254/latest/meta-data/")
+
+
+@patch(module_name + ".fetch_url")
+def test__fetch_200(m_fetch_url, ec2_instance):
+ m_fetch_url.return_value = (io.StringIO("my-value"), {"status": 200})
+ assert ec2_instance._fetch("http://169.254.169.254/latest/meta-data/ami-id") == "my-value"
+
+ m_fetch_url.return_value = (io.StringIO("1"), {"status": 200})
+ assert ec2_instance._fetch("http://169.254.169.254/latest/meta-data/ami-id") == "1"
+
+
+@patch(module_name + ".fetch_url")
+def test_fetch(m_fetch_url, ec2_instance):
+ raw_list = "ami-id\n"
+ m_fetch_url.side_effect = [
+ (io.StringIO(raw_list), {"status": 200}),
+ (io.StringIO("my-value"), {"status": 200}),
+ ]
+ ec2_instance.fetch("http://169.254.169.254/latest/meta-data/")
+ assert ec2_instance._data == {"http://169.254.169.254/latest/meta-data/ami-id": "my-value"}
+
+
+@patch(module_name + ".fetch_url")
+def test_fetch_recursive(m_fetch_url, ec2_instance):
+ raw_list = "whatever/\n"
+ m_fetch_url.side_effect = [
+ (io.StringIO(raw_list), {"status": 200}),
+ (io.StringIO("my-key"), {"status": 200}),
+ (io.StringIO("my-value"), {"status": 200}),
+ ]
+ ec2_instance.fetch("http://169.254.169.254/latest/meta-data/")
+ assert ec2_instance._data == {"http://169.254.169.254/latest/meta-data/whatever/my-key": "my-value"}
+
+
+@patch(module_name + ".fetch_url")
+def test__fetch_user_data_compressed(m_fetch_url, ec2_instance):
+ user_data = b"""Content-Type: multipart/mixed; boundary="MIMEBOUNDARY"
+MIME-Version: 1.0
+
+--MIMEBOUNDARY
+Content-Transfer-Encoding: 7bit
+Content-Type: text/cloud-config
+Mime-Version: 1.0
+
+packages: ['httpie']
+
+--MIMEBOUNDARY--
+"""
+
+ m_fetch_url.return_value = (io.BytesIO(gzip.compress(user_data)), {"status": 200})
+ assert ec2_instance._fetch("http://169.254.169.254/latest/user-data") == user_data.decode("utf-8")
+
+
+@patch(module_name + ".fetch_url")
+def test__fetch_user_data_plain(m_fetch_url, ec2_instance):
+ user_data = b"""Content-Type: multipart/mixed; boundary="MIMEBOUNDARY"
+MIME-Version: 1.0
+
+--MIMEBOUNDARY
+Content-Transfer-Encoding: 7bit
+Content-Type: text/cloud-config
+Mime-Version: 1.0
+
+packages: ['httpie']
+
+--MIMEBOUNDARY--
+"""
+
+ m_fetch_url.return_value = (io.BytesIO(user_data), {"status": 200})
+ assert ec2_instance._fetch("http://169.254.169.254/latest/user-data") == user_data.decode("utf-8")
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_security_group.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_security_group.py
index 1ebbe86c6..c47122657 100644
--- a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_security_group.py
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_security_group.py
@@ -1,83 +1,59 @@
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
from ansible_collections.amazon.aws.plugins.modules import ec2_security_group as group_module
def test_from_permission():
internal_http = {
- 'FromPort': 80,
- 'IpProtocol': 'tcp',
- 'IpRanges': [
- {
- 'CidrIp': '10.0.0.0/8',
- 'Description': 'Foo Bar Baz'
- },
+ "FromPort": 80,
+ "IpProtocol": "tcp",
+ "IpRanges": [
+ {"CidrIp": "10.0.0.0/8", "Description": "Foo Bar Baz"},
],
- 'Ipv6Ranges': [
- {'CidrIpv6': 'fe80::94cc:8aff:fef6:9cc/64'},
+ "Ipv6Ranges": [
+ {"CidrIpv6": "fe80::94cc:8aff:fef6:9cc/64"},
],
- 'PrefixListIds': [],
- 'ToPort': 80,
- 'UserIdGroupPairs': [],
+ "PrefixListIds": [],
+ "ToPort": 80,
+ "UserIdGroupPairs": [],
}
perms = list(group_module.rule_from_group_permission(internal_http))
assert len(perms) == 2
- assert perms[0].target == '10.0.0.0/8'
- assert perms[0].target_type == 'ipv4'
- assert perms[0].description == 'Foo Bar Baz'
- assert perms[1].target == 'fe80::94cc:8aff:fef6:9cc/64'
+ assert perms[0].target == "10.0.0.0/8"
+ assert perms[0].target_type == "ipv4"
+ assert perms[0].description == "Foo Bar Baz"
+ assert perms[1].target == "fe80::94cc:8aff:fef6:9cc/64"
global_egress = {
- 'IpProtocol': '-1',
- 'IpRanges': [{'CidrIp': '0.0.0.0/0'}],
- 'Ipv6Ranges': [],
- 'PrefixListIds': [],
- 'UserIdGroupPairs': []
+ "IpProtocol": "-1",
+ "IpRanges": [{"CidrIp": "0.0.0.0/0"}],
+ "Ipv6Ranges": [],
+ "PrefixListIds": [],
+ "UserIdGroupPairs": [],
}
perms = list(group_module.rule_from_group_permission(global_egress))
assert len(perms) == 1
- assert perms[0].target == '0.0.0.0/0'
+ assert perms[0].target == "0.0.0.0/0"
assert perms[0].port_range == (None, None)
internal_prefix_http = {
- 'FromPort': 80,
- 'IpProtocol': 'tcp',
- 'PrefixListIds': [
- {'PrefixListId': 'p-1234'}
- ],
- 'ToPort': 80,
- 'UserIdGroupPairs': [],
+ "FromPort": 80,
+ "IpProtocol": "tcp",
+ "PrefixListIds": [{"PrefixListId": "p-1234"}],
+ "ToPort": 80,
+ "UserIdGroupPairs": [],
}
perms = list(group_module.rule_from_group_permission(internal_prefix_http))
assert len(perms) == 1
- assert perms[0].target == 'p-1234'
+ assert perms[0].target == "p-1234"
def test_rule_to_permission():
tests = [
- group_module.Rule((22, 22), 'udp', 'sg-1234567890', 'group', None),
- group_module.Rule((1, 65535), 'tcp', '0.0.0.0/0', 'ipv4', "All TCP from everywhere"),
- group_module.Rule((443, 443), 'tcp', 'ip-123456', 'ip_prefix', "Traffic to privatelink IPs"),
- group_module.Rule((443, 443), 'tcp', 'feed:dead:::beef/64', 'ipv6', None),
+ group_module.Rule((22, 22), "udp", "sg-1234567890", "group", None),
+ group_module.Rule((1, 65535), "tcp", "0.0.0.0/0", "ipv4", "All TCP from everywhere"),
+ group_module.Rule((443, 443), "tcp", "ip-123456", "ip_prefix", "Traffic to privatelink IPs"),
+ group_module.Rule((443, 443), "tcp", "feed:dead:::beef/64", "ipv6", None),
]
for test in tests:
perm = group_module.to_permission(test)
- assert perm['FromPort'], perm['ToPort'] == test.port_range
- assert perm['IpProtocol'] == test.protocol
-
-
-def test_validate_ip():
- class Warner(object):
- def warn(self, msg):
- return
- ips = [
- ('10.1.1.1/24', '10.1.1.0/24'),
- ('192.168.56.101/16', '192.168.0.0/16'),
- # Don't modify IPv6 CIDRs, AWS supports /128 and device ranges
- ('fc00:8fe0:fe80:b897:8990:8a7c:99bf:323d/128', 'fc00:8fe0:fe80:b897:8990:8a7c:99bf:323d/128'),
- ]
-
- for ip, net in ips:
- assert group_module.validate_ip(Warner(), ip) == net
+ assert perm["FromPort"], perm["ToPort"] == test.port_range
+ assert perm["IpProtocol"] == test.protocol
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_snapshot_info.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_snapshot_info.py
new file mode 100644
index 000000000..34767d38a
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_snapshot_info.py
@@ -0,0 +1,128 @@
+# (c) 2022 Red Hat Inc.
+
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import ANY
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import patch
+
+import pytest
+
+from ansible_collections.amazon.aws.plugins.modules import ec2_snapshot_info
+
+module_name = "ansible_collections.amazon.aws.plugins.modules.ec2_snapshot_info"
+
+
+@pytest.mark.parametrize(
+ "snapshot_ids,owner_ids,restorable_by_user_ids,filters,max_results,next_token_id,expected",
+ [([], [], [], {}, None, None, {})],
+)
+def test_build_request_args(
+ snapshot_ids, owner_ids, restorable_by_user_ids, filters, max_results, next_token_id, expected
+):
+ assert (
+ ec2_snapshot_info.build_request_args(
+ snapshot_ids, owner_ids, restorable_by_user_ids, filters, max_results, next_token_id
+ )
+ == expected
+ )
+
+
+def test_get_snapshots():
+ module = MagicMock()
+ connection = MagicMock()
+
+ connection.describe_snapshots.return_value = {
+ "Snapshots": [
+ {
+ "Description": "Created by CreateImage(i-083b9dd1234567890) for ami-01486e111234567890",
+ "Encrypted": False,
+ "OwnerId": "123456789000",
+ "Progress": "100%",
+ "SnapshotId": "snap-0f00cba1234567890",
+ "StartTime": "2021-09-30T01:04:49.724000+00:00",
+ "State": "completed",
+ "StorageTier": "standard",
+ "Tags": [
+ {"Key": "TagKey", "Value": "TagValue"},
+ ],
+ "VolumeId": "vol-0ae6c5e1234567890",
+ "VolumeSize": 10,
+ },
+ {
+ "Description": "Created by CreateImage(i-083b9dd1234567890) for ami-01486e111234567890",
+ "Encrypted": False,
+ "OwnerId": "123456789000",
+ "Progress": "100%",
+ "SnapshotId": "snap-0f00cba1234567890",
+ "StartTime": "2021-09-30T01:04:49.724000+00:00",
+ "State": "completed",
+ "StorageTier": "standard",
+ "Tags": [
+ {"Key": "TagKey", "Value": "TagValue"},
+ ],
+ "VolumeId": "vol-0ae6c5e1234567890",
+ "VolumeSize": 10,
+ },
+ ]
+ }
+
+ request_args = {"SnapshotIds": ["snap-0f00cba1234567890"]}
+
+ snapshot_info = ec2_snapshot_info.get_snapshots(connection, module, request_args)
+
+ assert connection.describe_snapshots.call_count == 1
+ connection.describe_snapshots.assert_called_with(aws_retry=True, SnapshotIds=["snap-0f00cba1234567890"])
+ assert len(snapshot_info["Snapshots"]) == 2
+
+
+@patch(module_name + ".build_request_args")
+@patch(module_name + ".get_snapshots")
+def test_list_ec2_snapshots(m_get_snapshots, m_build_request_args):
+ module = MagicMock()
+ connection = MagicMock()
+
+ m_get_snapshots.return_value = {
+ "Snapshots": [
+ {
+ "Description": "Created by CreateImage(i-083b9dd1234567890) for ami-01486e111234567890",
+ "Encrypted": False,
+ "OwnerId": "123456789000",
+ "Progress": "100%",
+ "SnapshotId": "snap-0f00cba1234567890",
+ "StartTime": "2021-09-30T01:04:49.724000+00:00",
+ "State": "completed",
+ "StorageTier": "standard",
+ "Tags": [
+ {"Key": "TagKey", "Value": "TagValue"},
+ ],
+ "VolumeId": "vol-0ae6c5e1234567890",
+ "VolumeSize": 10,
+ }
+ ]
+ }
+
+ m_build_request_args.return_value = {"SnapshotIds": ["snap-0f00cba1234567890"]}
+
+ request_args = ec2_snapshot_info.build_request_args()
+
+ ec2_snapshot_info.list_ec2_snapshots(connection, module, request_args)
+
+ assert m_get_snapshots.call_count == 1
+ m_get_snapshots.assert_has_calls(
+ [
+ call(connection, module, m_build_request_args.return_value),
+ ]
+ )
+
+
+@patch(module_name + ".AnsibleAWSModule")
+def test_main_success(m_AnsibleAWSModule):
+ m_module = MagicMock()
+ m_AnsibleAWSModule.return_value = m_module
+
+ ec2_snapshot_info.main()
+
+ m_module.client.assert_called_with("ec2", retry_decorator=ANY)
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_vpc_dhcp_option.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_vpc_dhcp_option.py
index 73726590f..27517115e 100644
--- a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_vpc_dhcp_option.py
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_ec2_vpc_dhcp_option.py
@@ -3,66 +3,71 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-# Magic... Incorrectly identified by pylint as unused
-from ansible_collections.amazon.aws.tests.unit.utils.amazon_placebo_fixtures import placeboify # pylint: disable=unused-import
-from ansible_collections.amazon.aws.tests.unit.compat.mock import patch
+from unittest.mock import patch
from ansible_collections.amazon.aws.plugins.modules import ec2_vpc_dhcp_option as dhcp_module
from ansible_collections.amazon.aws.tests.unit.plugins.modules.utils import ModuleTestCase
-test_module_params = {'domain_name': 'us-west-2.compute.internal',
- 'dns_servers': ['AmazonProvidedDNS'],
- 'ntp_servers': ['10.10.2.3', '10.10.4.5'],
- 'netbios_name_servers': ['10.20.2.3', '10.20.4.5'],
- 'netbios_node_type': 2}
-
-test_create_config = [{'Key': 'domain-name', 'Values': [{'Value': 'us-west-2.compute.internal'}]},
- {'Key': 'domain-name-servers', 'Values': [{'Value': 'AmazonProvidedDNS'}]},
- {'Key': 'ntp-servers', 'Values': [{'Value': '10.10.2.3'}, {'Value': '10.10.4.5'}]},
- {'Key': 'netbios-name-servers', 'Values': [{'Value': '10.20.2.3'}, {'Value': '10.20.4.5'}]},
- {'Key': 'netbios-node-type', 'Values': 2}]
-
-
-test_create_option_set = [{'Key': 'domain-name', 'Values': ['us-west-2.compute.internal']},
- {'Key': 'domain-name-servers', 'Values': ['AmazonProvidedDNS']},
- {'Key': 'ntp-servers', 'Values': ['10.10.2.3', '10.10.4.5']},
- {'Key': 'netbios-name-servers', 'Values': ['10.20.2.3', '10.20.4.5']},
- {'Key': 'netbios-node-type', 'Values': ['2']}]
-
-test_normalize_config = {'domain-name': ['us-west-2.compute.internal'],
- 'domain-name-servers': ['AmazonProvidedDNS'],
- 'ntp-servers': ['10.10.2.3', '10.10.4.5'],
- 'netbios-name-servers': ['10.20.2.3', '10.20.4.5'],
- 'netbios-node-type': '2'
- }
-
-
-class FakeModule(object):
+# Magic... Incorrectly identified by pylint as unused
+# pylint: disable-next=unused-import
+from ansible_collections.amazon.aws.tests.unit.utils.amazon_placebo_fixtures import placeboify
+
+test_module_params = {
+ "domain_name": "us-west-2.compute.internal",
+ "dns_servers": ["AmazonProvidedDNS"],
+ "ntp_servers": ["10.10.2.3", "10.10.4.5"],
+ "netbios_name_servers": ["10.20.2.3", "10.20.4.5"],
+ "netbios_node_type": 2,
+}
+
+test_create_config = [
+ {"Key": "domain-name", "Values": [{"Value": "us-west-2.compute.internal"}]},
+ {"Key": "domain-name-servers", "Values": [{"Value": "AmazonProvidedDNS"}]},
+ {"Key": "ntp-servers", "Values": [{"Value": "10.10.2.3"}, {"Value": "10.10.4.5"}]},
+ {"Key": "netbios-name-servers", "Values": [{"Value": "10.20.2.3"}, {"Value": "10.20.4.5"}]},
+ {"Key": "netbios-node-type", "Values": 2},
+]
+
+
+test_create_option_set = [
+ {"Key": "domain-name", "Values": ["us-west-2.compute.internal"]},
+ {"Key": "domain-name-servers", "Values": ["AmazonProvidedDNS"]},
+ {"Key": "ntp-servers", "Values": ["10.10.2.3", "10.10.4.5"]},
+ {"Key": "netbios-name-servers", "Values": ["10.20.2.3", "10.20.4.5"]},
+ {"Key": "netbios-node-type", "Values": ["2"]},
+]
+
+test_normalize_config = {
+ "domain-name": ["us-west-2.compute.internal"],
+ "domain-name-servers": ["AmazonProvidedDNS"],
+ "ntp-servers": ["10.10.2.3", "10.10.4.5"],
+ "netbios-name-servers": ["10.20.2.3", "10.20.4.5"],
+ "netbios-node-type": "2",
+}
+
+
+class FakeModule:
def __init__(self, **kwargs):
self.params = kwargs
def fail_json(self, *args, **kwargs):
self.exit_args = args
self.exit_kwargs = kwargs
- raise Exception('FAIL')
+ raise Exception("FAIL")
def fail_json_aws(self, *args, **kwargs):
self.exit_args = args
self.exit_kwargs = kwargs
- raise Exception('FAIL')
+ raise Exception("FAIL")
def exit_json(self, *args, **kwargs):
self.exit_args = args
self.exit_kwargs = kwargs
- raise Exception('EXIT')
+ raise Exception("EXIT")
-@patch.object(dhcp_module.AnsibleAWSModule, 'client')
+@patch.object(dhcp_module.AnsibleAWSModule, "client")
class TestDhcpModule(ModuleTestCase):
-
def test_normalize_config(self, client_mock):
result = dhcp_module.normalize_ec2_vpc_dhcp_config(test_create_config)
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_kms_key.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_kms_key.py
index 5a53e2ddb..b2d8e0b50 100644
--- a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_kms_key.py
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_kms_key.py
@@ -4,12 +4,11 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-import pytest
+from unittest.mock import MagicMock
+from unittest.mock import patch
-from unittest.mock import MagicMock, call, patch
from ansible_collections.amazon.aws.plugins.modules import kms_key
-
module_name = "ansible_collections.amazon.aws.plugins.modules.kms_key"
key_details = {
"KeyMetadata": {
@@ -59,7 +58,6 @@ key_details = {
@patch(module_name + ".get_kms_metadata_with_backoff")
def test_fetch_key_metadata(m_get_kms_metadata_with_backoff):
-
module = MagicMock()
kms_client = MagicMock()
@@ -69,14 +67,8 @@ def test_fetch_key_metadata(m_get_kms_metadata_with_backoff):
def test_validate_params():
-
module = MagicMock()
- module.params = {
- "state": "present",
- "multi_region": True
- }
+ module.params = {"state": "present", "multi_region": True}
result = kms_key.validate_params(module, key_details["KeyMetadata"])
- module.fail_json.assert_called_with(
- msg="You cannot change the multi-region property on an existing key."
- )
+ module.fail_json.assert_called_with(msg="You cannot change the multi-region property on an existing key.")
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_lambda_layer.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_lambda_layer.py
index 451a61766..cd3032ef7 100644
--- a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_lambda_layer.py
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_lambda_layer.py
@@ -4,12 +4,12 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import patch
import pytest
-from unittest.mock import MagicMock, call, patch
from ansible_collections.amazon.aws.plugins.modules import lambda_layer
@@ -19,155 +19,120 @@ def raise_lambdalayer_exception(e=None, m=None):
return lambda_layer.LambdaLayerFailure(exc=e, msg=m)
-mod_list_layer = 'ansible_collections.amazon.aws.plugins.modules.lambda_layer.list_layer_versions'
-mod_create_layer = 'ansible_collections.amazon.aws.plugins.modules.lambda_layer.create_layer_version'
-mod_delete_layer = 'ansible_collections.amazon.aws.plugins.modules.lambda_layer.delete_layer_version'
+mod_list_layer = "ansible_collections.amazon.aws.plugins.modules.lambda_layer.list_layer_versions"
+mod_create_layer = "ansible_collections.amazon.aws.plugins.modules.lambda_layer.create_layer_version"
+mod_delete_layer = "ansible_collections.amazon.aws.plugins.modules.lambda_layer.delete_layer_version"
@pytest.mark.parametrize(
- "params,api_result,calls,ansible_result",
+ "params,api_result,calls,_ansible_result",
[
+ ({"name": "testlayer", "version": 4}, [], [], {"changed": False, "layer_versions": []}),
(
- {
- "name": "testlayer",
- "version": 4
- },
- [],
- [],
- {"changed": False, "layer_versions": []}
- ),
- (
- {
- "name": "testlayer",
- "version": 4
- },
+ {"name": "testlayer", "version": 4},
[
{
- 'compatible_runtimes': ["python3.7"],
- 'created_date': "2022-09-29T10:31:35.977+0000",
- 'layer_version_arn': "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:2",
+ "compatible_runtimes": ["python3.7"],
+ "created_date": "2022-09-29T10:31:35.977+0000",
+ "layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:2",
"license_info": "MIT",
- 'version': 2,
- 'compatible_architectures': [
- 'arm64'
- ]
+ "version": 2,
+ "compatible_architectures": ["arm64"],
},
{
"created_date": "2022-09-29T10:31:26.341+0000",
"description": "lambda layer first version",
"layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:1",
- "version": 1
- }
+ "version": 1,
+ },
],
[],
- {"changed": False, "layer_versions": []}
+ {"changed": False, "layer_versions": []},
),
(
- {
- "name": "testlayer",
- "version": 2
- },
+ {"name": "testlayer", "version": 2},
[
{
- 'compatible_runtimes': ["python3.7"],
- 'created_date': "2022-09-29T10:31:35.977+0000",
- 'layer_version_arn': "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:2",
+ "compatible_runtimes": ["python3.7"],
+ "created_date": "2022-09-29T10:31:35.977+0000",
+ "layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:2",
"license_info": "MIT",
- 'version': 2,
- 'compatible_architectures': [
- 'arm64'
- ]
+ "version": 2,
+ "compatible_architectures": ["arm64"],
},
{
"created_date": "2022-09-29T10:31:26.341+0000",
"description": "lambda layer first version",
"layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:1",
- "version": 1
- }
- ],
- [
- call(LayerName='testlayer', VersionNumber=2)
+ "version": 1,
+ },
],
+ [call(LayerName="testlayer", VersionNumber=2)],
{
"changed": True,
"layer_versions": [
{
- 'compatible_runtimes': ["python3.7"],
- 'created_date': "2022-09-29T10:31:35.977+0000",
- 'layer_version_arn': "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:2",
+ "compatible_runtimes": ["python3.7"],
+ "created_date": "2022-09-29T10:31:35.977+0000",
+ "layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:2",
"license_info": "MIT",
- 'version': 2,
- 'compatible_architectures': [
- 'arm64'
- ]
+ "version": 2,
+ "compatible_architectures": ["arm64"],
}
- ]
- }
+ ],
+ },
),
(
- {
- "name": "testlayer",
- "version": -1
- },
+ {"name": "testlayer", "version": -1},
[
{
- 'compatible_runtimes': ["python3.7"],
- 'created_date': "2022-09-29T10:31:35.977+0000",
- 'layer_version_arn': "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:2",
+ "compatible_runtimes": ["python3.7"],
+ "created_date": "2022-09-29T10:31:35.977+0000",
+ "layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:2",
"license_info": "MIT",
- 'version': 2,
- 'compatible_architectures': [
- 'arm64'
- ]
+ "version": 2,
+ "compatible_architectures": ["arm64"],
},
{
"created_date": "2022-09-29T10:31:26.341+0000",
"description": "lambda layer first version",
"layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:1",
- "version": 1
- }
- ],
- [
- call(LayerName='testlayer', VersionNumber=2),
- call(LayerName='testlayer', VersionNumber=1)
+ "version": 1,
+ },
],
+ [call(LayerName="testlayer", VersionNumber=2), call(LayerName="testlayer", VersionNumber=1)],
{
"changed": True,
"layer_versions": [
{
- 'compatible_runtimes': ["python3.7"],
- 'created_date': "2022-09-29T10:31:35.977+0000",
- 'layer_version_arn': "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:2",
+ "compatible_runtimes": ["python3.7"],
+ "created_date": "2022-09-29T10:31:35.977+0000",
+ "layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:2",
"license_info": "MIT",
- 'version': 2,
- 'compatible_architectures': [
- 'arm64'
- ]
+ "version": 2,
+ "compatible_architectures": ["arm64"],
},
{
"created_date": "2022-09-29T10:31:26.341+0000",
"description": "lambda layer first version",
"layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:1",
- "version": 1
- }
- ]
- }
- )
- ]
+ "version": 1,
+ },
+ ],
+ },
+ ),
+ ],
)
@patch(mod_list_layer)
-def test_delete_layer(m_list_layer, params, api_result, calls, ansible_result):
-
+def test_delete_layer(m_list_layer, params, api_result, calls, _ansible_result):
lambda_client = MagicMock()
lambda_client.delete_layer_version.return_value = None
m_list_layer.return_value = api_result
result = lambda_layer.delete_layer_version(lambda_client, params)
- assert result == ansible_result
+ assert result == _ansible_result
- m_list_layer.assert_called_once_with(
- lambda_client, params.get("name")
- )
+ m_list_layer.assert_called_once_with(lambda_client, params.get("name"))
if not calls:
lambda_client.delete_layer_version.assert_not_called()
@@ -177,62 +142,54 @@ def test_delete_layer(m_list_layer, params, api_result, calls, ansible_result):
@patch(mod_list_layer)
def test_delete_layer_check_mode(m_list_layer):
-
lambda_client = MagicMock()
lambda_client.delete_layer_version.return_value = None
m_list_layer.return_value = [
{
- 'compatible_runtimes': ["python3.7"],
- 'created_date': "2022-09-29T10:31:35.977+0000",
- 'layer_version_arn': "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:2",
+ "compatible_runtimes": ["python3.7"],
+ "created_date": "2022-09-29T10:31:35.977+0000",
+ "layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:2",
"license_info": "MIT",
- 'version': 2,
- 'compatible_architectures': [
- 'arm64'
- ]
+ "version": 2,
+ "compatible_architectures": ["arm64"],
},
{
"created_date": "2022-09-29T10:31:26.341+0000",
"description": "lambda layer first version",
"layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:1",
- "version": 1
- }
+ "version": 1,
+ },
]
params = {"name": "testlayer", "version": -1}
result = lambda_layer.delete_layer_version(lambda_client, params, check_mode=True)
- ansible_result = {
+ _ansible_result = {
"changed": True,
"layer_versions": [
{
- 'compatible_runtimes': ["python3.7"],
- 'created_date': "2022-09-29T10:31:35.977+0000",
- 'layer_version_arn': "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:2",
+ "compatible_runtimes": ["python3.7"],
+ "created_date": "2022-09-29T10:31:35.977+0000",
+ "layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:2",
"license_info": "MIT",
- 'version': 2,
- 'compatible_architectures': [
- 'arm64'
- ]
+ "version": 2,
+ "compatible_architectures": ["arm64"],
},
{
"created_date": "2022-09-29T10:31:26.341+0000",
"description": "lambda layer first version",
"layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:1",
- "version": 1
- }
- ]
+ "version": 1,
+ },
+ ],
}
- assert result == ansible_result
+ assert result == _ansible_result
- m_list_layer.assert_called_once_with(
- lambda_client, params.get("name")
- )
+ m_list_layer.assert_called_once_with(lambda_client, params.get("name"))
lambda_client.delete_layer_version.assert_not_called()
@patch(mod_list_layer)
def test_delete_layer_failure(m_list_layer):
-
lambda_client = MagicMock()
lambda_client.delete_layer_version.side_effect = raise_lambdalayer_exception()
@@ -241,7 +198,7 @@ def test_delete_layer_failure(m_list_layer):
"created_date": "2022-09-29T10:31:26.341+0000",
"description": "lambda layer first version",
"layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:testlayer:1",
- "version": 1
+ "version": 1,
}
]
params = {"name": "testlayer", "version": 1}
@@ -249,42 +206,38 @@ def test_delete_layer_failure(m_list_layer):
lambda_layer.delete_layer_version(lambda_client, params)
-@pytest.mark.parametrize(
- "b_s3content",
- [
- (True),
- (False)
- ]
-)
+@pytest.mark.parametrize("b_s3content", [(True), (False)])
@patch(mod_list_layer)
def test_create_layer(m_list_layer, b_s3content, tmp_path):
params = {
"name": "testlayer",
"description": "ansible units testing sample layer",
"content": {},
- "license_info": "MIT"
+ "license_info": "MIT",
}
lambda_client = MagicMock()
lambda_client.publish_layer_version.return_value = {
- 'CompatibleRuntimes': [
- 'python3.6',
- 'python3.7',
+ "CompatibleRuntimes": [
+ "python3.6",
+ "python3.7",
],
- 'Content': {
- 'CodeSha256': 'tv9jJO+rPbXUUXuRKi7CwHzKtLDkDRJLB3cC3Z/ouXo=',
- 'CodeSize': 169,
- 'Location': 'https://awslambda-us-west-2-layers.s3.us-west-2.amazonaws.com/snapshots/123456789012/my-layer-4aaa2fbb',
+ "Content": {
+ "CodeSha256": "tv9jJO+rPbXUUXuRKi7CwHzKtLDkDRJLB3cC3Z/ouXo=",
+ "CodeSize": 169,
+ "Location": (
+ "https://awslambda-us-west-2-layers.s3.us-west-2.amazonaws.com/snapshots/123456789012/my-layer-4aaa2fbb"
+ ),
},
- 'CreatedDate': '2018-11-14T23:03:52.894+0000',
- 'Description': "ansible units testing sample layer",
- 'LayerArn': 'arn:aws:lambda:us-west-2:123456789012:layer:my-layer',
- 'LayerVersionArn': 'arn:aws:lambda:us-west-2:123456789012:layer:testlayer:1',
- 'LicenseInfo': 'MIT',
- 'Version': 1,
- 'ResponseMetadata': {
- 'http_header': 'true',
+ "CreatedDate": "2018-11-14T23:03:52.894+0000",
+ "Description": "ansible units testing sample layer",
+ "LayerArn": "arn:aws:lambda:us-west-2:123456789012:layer:my-layer",
+ "LayerVersionArn": "arn:aws:lambda:us-west-2:123456789012:layer:testlayer:1",
+ "LicenseInfo": "MIT",
+ "Version": 1,
+ "ResponseMetadata": {
+ "http_header": "true",
},
}
@@ -292,33 +245,25 @@ def test_create_layer(m_list_layer, b_s3content, tmp_path):
"changed": True,
"layer_versions": [
{
- 'compatible_runtimes': ['python3.6', 'python3.7'],
- 'content': {
- 'code_sha256': 'tv9jJO+rPbXUUXuRKi7CwHzKtLDkDRJLB3cC3Z/ouXo=',
- 'code_size': 169,
- 'location': 'https://awslambda-us-west-2-layers.s3.us-west-2.amazonaws.com/snapshots/123456789012/my-layer-4aaa2fbb'
+ "compatible_runtimes": ["python3.6", "python3.7"],
+ "content": {
+ "code_sha256": "tv9jJO+rPbXUUXuRKi7CwHzKtLDkDRJLB3cC3Z/ouXo=",
+ "code_size": 169,
+ "location": "https://awslambda-us-west-2-layers.s3.us-west-2.amazonaws.com/snapshots/123456789012/my-layer-4aaa2fbb",
},
- 'created_date': '2018-11-14T23:03:52.894+0000',
- 'description': 'ansible units testing sample layer',
- 'layer_arn': 'arn:aws:lambda:us-west-2:123456789012:layer:my-layer',
- 'layer_version_arn': 'arn:aws:lambda:us-west-2:123456789012:layer:testlayer:1',
- 'license_info': 'MIT',
- 'version': 1
+ "created_date": "2018-11-14T23:03:52.894+0000",
+ "description": "ansible units testing sample layer",
+ "layer_arn": "arn:aws:lambda:us-west-2:123456789012:layer:my-layer",
+ "layer_version_arn": "arn:aws:lambda:us-west-2:123456789012:layer:testlayer:1",
+ "license_info": "MIT",
+ "version": 1,
}
- ]
+ ],
}
if b_s3content:
- params["content"] = {
- "s3_bucket": "mybucket",
- "s3_key": "mybucket-key",
- "s3_object_version": "v1"
- }
- content_arg = {
- "S3Bucket": "mybucket",
- "S3Key": "mybucket-key",
- "S3ObjectVersion": "v1"
- }
+ params["content"] = {"s3_bucket": "mybucket", "s3_key": "mybucket-key", "s3_object_version": "v1"}
+ content_arg = {"S3Bucket": "mybucket", "S3Key": "mybucket-key", "S3ObjectVersion": "v1"}
else:
binary_data = b"simple lambda layer content"
test_dir = tmp_path / "lambda_layer"
@@ -350,12 +295,8 @@ def test_create_layer_check_mode(m_list_layer):
params = {
"name": "testlayer",
"description": "ansible units testing sample layer",
- "content": {
- "s3_bucket": "mybucket",
- "s3_key": "mybucket-key",
- "s3_object_version": "v1"
- },
- "license_info": "MIT"
+ "content": {"s3_bucket": "mybucket", "s3_key": "mybucket-key", "s3_object_version": "v1"},
+ "license_info": "MIT",
}
lambda_client = MagicMock()
@@ -371,19 +312,9 @@ def test_create_layer_failure():
params = {
"name": "testlayer",
"description": "ansible units testing sample layer",
- "content": {
- "s3_bucket": "mybucket",
- "s3_key": "mybucket-key",
- "s3_object_version": "v1"
- },
- "compatible_runtimes": [
- "nodejs",
- "python3.9"
- ],
- "compatible_architectures": [
- 'x86_64',
- 'arm64'
- ]
+ "content": {"s3_bucket": "mybucket", "s3_key": "mybucket-key", "s3_object_version": "v1"},
+ "compatible_runtimes": ["nodejs", "python3.9"],
+ "compatible_architectures": ["x86_64", "arm64"],
}
lambda_client = MagicMock()
lambda_client.publish_layer_version.side_effect = raise_lambdalayer_exception()
@@ -399,14 +330,8 @@ def test_create_layer_using_unexisting_file():
"content": {
"zip_file": "this_file_does_not_exist",
},
- "compatible_runtimes": [
- "nodejs",
- "python3.9"
- ],
- "compatible_architectures": [
- 'x86_64',
- 'arm64'
- ]
+ "compatible_runtimes": ["nodejs", "python3.9"],
+ "compatible_architectures": ["x86_64", "arm64"],
}
lambda_client = MagicMock()
@@ -421,28 +346,15 @@ def test_create_layer_using_unexisting_file():
@pytest.mark.parametrize(
"params,failure",
[
- (
- {"name": "test-layer"},
- False
- ),
- (
- {"name": "test-layer", "state": "absent"},
- False
- ),
- (
- {"name": "test-layer"},
- True
- ),
- (
- {"name": "test-layer", "state": "absent"},
- True
- ),
- ]
+ ({"name": "test-layer"}, False),
+ ({"name": "test-layer", "state": "absent"}, False),
+ ({"name": "test-layer"}, True),
+ ({"name": "test-layer", "state": "absent"}, True),
+ ],
)
@patch(mod_create_layer)
@patch(mod_delete_layer)
def test_execute_module(m_delete_layer, m_create_layer, params, failure):
-
module = MagicMock()
module.params = params
module.check_mode = False
@@ -462,9 +374,7 @@ def test_execute_module(m_delete_layer, m_create_layer, params, failure):
module.exit_json.assert_called_with(**result)
module.fail_json_aws.assert_not_called()
- m_create_layer.assert_called_with(
- lambda_client, params, module.check_mode
- )
+ m_create_layer.assert_called_with(lambda_client, params, module.check_mode)
m_delete_layer.assert_not_called()
elif state == "absent":
@@ -474,9 +384,7 @@ def test_execute_module(m_delete_layer, m_create_layer, params, failure):
module.exit_json.assert_called_with(**result)
module.fail_json_aws.assert_not_called()
- m_delete_layer.assert_called_with(
- lambda_client, params, module.check_mode
- )
+ m_delete_layer.assert_called_with(lambda_client, params, module.check_mode)
m_create_layer.assert_not_called()
else:
exc = "lambdalayer_execute_module_exception"
@@ -488,6 +396,4 @@ def test_execute_module(m_delete_layer, m_create_layer, params, failure):
lambda_layer.execute_module(module, lambda_client)
module.exit_json.assert_not_called()
- module.fail_json_aws.assert_called_with(
- exc, msg=msg
- )
+ module.fail_json_aws.assert_called_with(exc, msg=msg)
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_lambda_layer_info.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_lambda_layer_info.py
index 25a1f15ac..201625401 100644
--- a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_lambda_layer_info.py
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_lambda_layer_info.py
@@ -4,104 +4,85 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import patch
import pytest
from botocore.exceptions import BotoCoreError
-from unittest.mock import MagicMock, call, patch
from ansible_collections.amazon.aws.plugins.modules import lambda_layer_info
-
-mod__list_layer_versions = 'ansible_collections.amazon.aws.plugins.modules.lambda_layer_info._list_layer_versions'
-mod__list_layers = 'ansible_collections.amazon.aws.plugins.modules.lambda_layer_info._list_layers'
-mod_list_layer_versions = 'ansible_collections.amazon.aws.plugins.modules.lambda_layer_info.list_layer_versions'
-mod_list_layers = 'ansible_collections.amazon.aws.plugins.modules.lambda_layer_info.list_layers'
+mod__list_layer_versions = "ansible_collections.amazon.aws.plugins.modules.lambda_layer_info._list_layer_versions"
+mod__list_layers = "ansible_collections.amazon.aws.plugins.modules.lambda_layer_info._list_layers"
+mod_list_layer_versions = "ansible_collections.amazon.aws.plugins.modules.lambda_layer_info.list_layer_versions"
+mod_list_layers = "ansible_collections.amazon.aws.plugins.modules.lambda_layer_info.list_layers"
list_layers_paginate_result = {
- 'NextMarker': '002',
- 'Layers': [
+ "NextMarker": "002",
+ "Layers": [
{
- 'LayerName': "test-layer-01",
- 'LayerArn': "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-01",
- 'LatestMatchingVersion': {
- 'LayerVersionArn': "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-01:1",
- 'Version': 1,
- 'Description': "lambda layer created for unit tests",
- 'CreatedDate': "2022-09-29T10:31:26.341+0000",
- 'CompatibleRuntimes': [
- 'nodejs',
- 'nodejs4.3',
- 'nodejs6.10'
- ],
- 'LicenseInfo': 'MIT',
- 'CompatibleArchitectures': [
- 'arm64'
- ]
- }
+ "LayerName": "test-layer-01",
+ "LayerArn": "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-01",
+ "LatestMatchingVersion": {
+ "LayerVersionArn": "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-01:1",
+ "Version": 1,
+ "Description": "lambda layer created for unit tests",
+ "CreatedDate": "2022-09-29T10:31:26.341+0000",
+ "CompatibleRuntimes": ["nodejs", "nodejs4.3", "nodejs6.10"],
+ "LicenseInfo": "MIT",
+ "CompatibleArchitectures": ["arm64"],
+ },
},
{
- 'LayerName': "test-layer-02",
- 'LayerArn': "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-02",
- 'LatestMatchingVersion': {
- 'LayerVersionArn': "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-02:1",
- 'Version': 1,
- 'CreatedDate': "2022-09-29T10:31:26.341+0000",
- 'CompatibleArchitectures': [
- 'arm64'
- ]
- }
+ "LayerName": "test-layer-02",
+ "LayerArn": "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-02",
+ "LatestMatchingVersion": {
+ "LayerVersionArn": "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-02:1",
+ "Version": 1,
+ "CreatedDate": "2022-09-29T10:31:26.341+0000",
+ "CompatibleArchitectures": ["arm64"],
+ },
},
],
- 'ResponseMetadata': {
- 'http': 'true',
+ "ResponseMetadata": {
+ "http": "true",
},
}
list_layers_result = [
{
- 'layer_name': "test-layer-01",
- 'layer_arn': "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-01",
- 'layer_version_arn': "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-01:1",
- 'version': 1,
- 'description': "lambda layer created for unit tests",
- 'created_date': "2022-09-29T10:31:26.341+0000",
- 'compatible_runtimes': [
- 'nodejs',
- 'nodejs4.3',
- 'nodejs6.10'
- ],
- 'license_info': 'MIT',
- 'compatible_architectures': [
- 'arm64'
- ]
+ "layer_name": "test-layer-01",
+ "layer_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-01",
+ "layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-01:1",
+ "version": 1,
+ "description": "lambda layer created for unit tests",
+ "created_date": "2022-09-29T10:31:26.341+0000",
+ "compatible_runtimes": ["nodejs", "nodejs4.3", "nodejs6.10"],
+ "license_info": "MIT",
+ "compatible_architectures": ["arm64"],
},
{
- 'layer_name': "test-layer-02",
- 'layer_arn': "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-02",
- 'layer_version_arn': "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-02:1",
- 'version': 1,
- 'created_date': "2022-09-29T10:31:26.341+0000",
- 'compatible_architectures': [
- 'arm64'
- ]
- }
+ "layer_name": "test-layer-02",
+ "layer_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-02",
+ "layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:test-layer-02:1",
+ "version": 1,
+ "created_date": "2022-09-29T10:31:26.341+0000",
+ "compatible_architectures": ["arm64"],
+ },
]
list_layers_versions_paginate_result = {
- 'LayerVersions': [
+ "LayerVersions": [
{
- 'CompatibleRuntimes': ["python3.7"],
- 'CreatedDate': "2022-09-29T10:31:35.977+0000",
- 'LayerVersionArn': "arn:aws:lambda:eu-west-2:123456789012:layer:layer-01:2",
+ "CompatibleRuntimes": ["python3.7"],
+ "CreatedDate": "2022-09-29T10:31:35.977+0000",
+ "LayerVersionArn": "arn:aws:lambda:eu-west-2:123456789012:layer:layer-01:2",
"LicenseInfo": "MIT",
- 'Version': 2,
- 'CompatibleArchitectures': [
- 'arm64'
- ]
+ "Version": 2,
+ "CompatibleArchitectures": ["arm64"],
},
{
"CompatibleRuntimes": ["python3.7"],
@@ -109,13 +90,13 @@ list_layers_versions_paginate_result = {
"Description": "lambda layer first version",
"LayerVersionArn": "arn:aws:lambda:eu-west-2:123456789012:layer:layer-01:1",
"LicenseInfo": "GPL-3.0-only",
- "Version": 1
- }
+ "Version": 1,
+ },
],
- 'ResponseMetadata': {
- 'http': 'true',
+ "ResponseMetadata": {
+ "http": "true",
},
- 'NextMarker': '001',
+ "NextMarker": "001",
}
@@ -126,9 +107,7 @@ list_layers_versions_result = [
"layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:layer-01:2",
"license_info": "MIT",
"version": 2,
- 'compatible_architectures': [
- 'arm64'
- ]
+ "compatible_architectures": ["arm64"],
},
{
"compatible_runtimes": ["python3.7"],
@@ -136,8 +115,8 @@ list_layers_versions_result = [
"description": "lambda layer first version",
"layer_version_arn": "arn:aws:lambda:eu-west-2:123456789012:layer:layer-01:1",
"license_info": "GPL-3.0-only",
- "version": 1
- }
+ "version": 1,
+ },
]
@@ -145,14 +124,8 @@ list_layers_versions_result = [
"params,call_args",
[
(
- {
- "compatible_runtime": "nodejs",
- "compatible_architecture": "arm64"
- },
- {
- "CompatibleRuntime": "nodejs",
- "CompatibleArchitecture": "arm64"
- }
+ {"compatible_runtime": "nodejs", "compatible_architecture": "arm64"},
+ {"CompatibleRuntime": "nodejs", "CompatibleArchitecture": "arm64"},
),
(
{
@@ -160,34 +133,20 @@ list_layers_versions_result = [
},
{
"CompatibleRuntime": "nodejs",
- }
- ),
- (
- {
- "compatible_architecture": "arm64"
},
- {
- "CompatibleArchitecture": "arm64"
- }
),
- (
- {}, {}
- )
- ]
+ ({"compatible_architecture": "arm64"}, {"CompatibleArchitecture": "arm64"}),
+ ({}, {}),
+ ],
)
@patch(mod__list_layers)
def test_list_layers_with_latest_version(m__list_layers, params, call_args):
-
lambda_client = MagicMock()
m__list_layers.return_value = list_layers_paginate_result
layers = lambda_layer_info.list_layers(lambda_client, **params)
- m__list_layers.assert_has_calls(
- [
- call(lambda_client, **call_args)
- ]
- )
+ m__list_layers.assert_has_calls([call(lambda_client, **call_args)])
assert layers == list_layers_result
@@ -195,16 +154,8 @@ def test_list_layers_with_latest_version(m__list_layers, params, call_args):
"params,call_args",
[
(
- {
- "name": "layer-01",
- "compatible_runtime": "nodejs",
- "compatible_architecture": "arm64"
- },
- {
- "LayerName": "layer-01",
- "CompatibleRuntime": "nodejs",
- "CompatibleArchitecture": "arm64"
- }
+ {"name": "layer-01", "compatible_runtime": "nodejs", "compatible_architecture": "arm64"},
+ {"LayerName": "layer-01", "CompatibleRuntime": "nodejs", "CompatibleArchitecture": "arm64"},
),
(
{
@@ -214,36 +165,23 @@ def test_list_layers_with_latest_version(m__list_layers, params, call_args):
{
"LayerName": "layer-01",
"CompatibleRuntime": "nodejs",
- }
- ),
- (
- {
- "name": "layer-01",
- "compatible_architecture": "arm64"
},
- {
- "LayerName": "layer-01",
- "CompatibleArchitecture": "arm64"
- }
),
(
- {"name": "layer-01"}, {"LayerName": "layer-01"}
- )
- ]
+ {"name": "layer-01", "compatible_architecture": "arm64"},
+ {"LayerName": "layer-01", "CompatibleArchitecture": "arm64"},
+ ),
+ ({"name": "layer-01"}, {"LayerName": "layer-01"}),
+ ],
)
@patch(mod__list_layer_versions)
def test_list_layer_versions(m__list_layer_versions, params, call_args):
-
lambda_client = MagicMock()
m__list_layer_versions.return_value = list_layers_versions_paginate_result
layers = lambda_layer_info.list_layer_versions(lambda_client, **params)
- m__list_layer_versions.assert_has_calls(
- [
- call(lambda_client, **call_args)
- ]
- )
+ m__list_layer_versions.assert_has_calls([call(lambda_client, **call_args)])
assert layers == list_layers_versions_result
@@ -251,28 +189,69 @@ def raise_botocore_exception():
return BotoCoreError(error="failed", operation="list_layers")
+def test_get_layer_version_success():
+ aws_layer_version = {
+ "CompatibleRuntimes": ["python3.8"],
+ "Content": {
+ "CodeSha256": "vqxKx6nTW31obVcB4MYaTWv5H3fBQTn2PHklL9+mF9E=",
+ "CodeSize": 9492621,
+ "Location": "https://test.s3.us-east-1.amazonaws.com/snapshots/123456789012/test-79b29d149e06?versionId=nmEKA3ZgiP7hce3J",
+ },
+ "CreatedDate": "2022-12-05T10:47:32.379+0000",
+ "Description": "Python units test layer",
+ "LayerArn": "arn:aws:lambda:us-east-1:123456789012:layer:test",
+ "LayerVersionArn": "arn:aws:lambda:us-east-1:123456789012:layer:test:2",
+ "LicenseInfo": "GPL-3.0-only",
+ "Version": 2,
+ "ResponseMetadata": {"some-metadata": "some-result"},
+ }
+
+ ansible_layer_version = {
+ "compatible_runtimes": ["python3.8"],
+ "content": {
+ "code_sha256": "vqxKx6nTW31obVcB4MYaTWv5H3fBQTn2PHklL9+mF9E=",
+ "code_size": 9492621,
+ "location": "https://test.s3.us-east-1.amazonaws.com/snapshots/123456789012/test-79b29d149e06?versionId=nmEKA3ZgiP7hce3J",
+ },
+ "created_date": "2022-12-05T10:47:32.379+0000",
+ "description": "Python units test layer",
+ "layer_arn": "arn:aws:lambda:us-east-1:123456789012:layer:test",
+ "layer_version_arn": "arn:aws:lambda:us-east-1:123456789012:layer:test:2",
+ "license_info": "GPL-3.0-only",
+ "version": 2,
+ }
+
+ lambda_client = MagicMock()
+ lambda_client.get_layer_version.return_value = aws_layer_version
+
+ layer_name = "test"
+ layer_version = 2
+
+ assert [ansible_layer_version] == lambda_layer_info.get_layer_version(lambda_client, layer_name, layer_version)
+ lambda_client.get_layer_version.assert_called_once_with(LayerName=layer_name, VersionNumber=layer_version)
+
+
+def test_get_layer_version_failure():
+ lambda_client = MagicMock()
+ lambda_client.get_layer_version.side_effect = raise_botocore_exception()
+
+ layer_name = MagicMock()
+ layer_version = MagicMock()
+
+ with pytest.raises(lambda_layer_info.LambdaLayerInfoFailure):
+ lambda_layer_info.get_layer_version(lambda_client, layer_name, layer_version)
+
+
@pytest.mark.parametrize(
"params",
[
- (
- {
- "name": "test-layer",
- "compatible_runtime": "nodejs",
- "compatible_architecture": "arm64"
- }
- ),
- (
- {
- "compatible_runtime": "nodejs",
- "compatible_architecture": "arm64"
- }
- )
- ]
+ ({"name": "test-layer", "compatible_runtime": "nodejs", "compatible_architecture": "arm64"}),
+ ({"compatible_runtime": "nodejs", "compatible_architecture": "arm64"}),
+ ],
)
@patch(mod__list_layers)
@patch(mod__list_layer_versions)
def test_list_layers_with_failure(m__list_layer_versions, m__list_layers, params):
-
lambda_client = MagicMock()
if "name" in params:
@@ -293,35 +272,14 @@ def raise_layer_info_exception(exc, msg):
@pytest.mark.parametrize(
"params,failure",
[
- (
- {
- "name": "test-layer",
- "compatible_runtime": "nodejs",
- "compatible_architecture": "arm64"
- },
- False
- ),
- (
- {
- "compatible_runtime": "nodejs",
- "compatible_architecture": "arm64"
- },
- False
- ),
- (
- {
- "name": "test-layer",
- "compatible_runtime": "nodejs",
- "compatible_architecture": "arm64"
- },
- True
- )
- ]
+ ({"name": "test-layer", "compatible_runtime": "nodejs", "compatible_architecture": "arm64"}, False),
+ ({"compatible_runtime": "nodejs", "compatible_architecture": "arm64"}, False),
+ ({"name": "test-layer", "compatible_runtime": "nodejs", "compatible_architecture": "arm64"}, True),
+ ],
)
@patch(mod_list_layers)
@patch(mod_list_layer_versions)
def test_execute_module(m_list_layer_versions, m_list_layers, params, failure):
-
lambda_client = MagicMock()
module = MagicMock()
@@ -351,8 +309,6 @@ def test_execute_module(m_list_layer_versions, m_list_layers, params, failure):
with pytest.raises(SystemExit):
lambda_layer_info.execute_module(module, lambda_client)
- module.exit_json.assert_called_with(
- changed=False, layers_versions=result
- )
+ module.exit_json.assert_called_with(changed=False, layers_versions=result)
method_called.assert_called_with(lambda_client, **params)
method_not_called.list_layers.assert_not_called()
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_rds_instance_info.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_rds_instance_info.py
new file mode 100644
index 000000000..8db20f1a0
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_rds_instance_info.py
@@ -0,0 +1,121 @@
+# (c) 2022 Red Hat Inc.
+#
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+from unittest.mock import ANY
+from unittest.mock import MagicMock
+from unittest.mock import call
+from unittest.mock import patch
+
+import botocore.exceptions
+import pytest
+
+from ansible_collections.amazon.aws.plugins.modules import rds_instance_info
+
+mod_name = "ansible_collections.amazon.aws.plugins.modules.rds_instance_info"
+
+
+def a_boto_exception():
+ return botocore.exceptions.UnknownServiceError(service_name="Whoops", known_service_names="Oula")
+
+
+@patch(mod_name + "._describe_db_instances")
+@patch(mod_name + ".get_instance_tags")
+def test_instance_info_one_instance(m_get_instance_tags, m_describe_db_instances):
+ conn = MagicMock()
+ instance_name = "my-instance"
+ m_get_instance_tags.return_value = []
+ m_describe_db_instances.return_value = [
+ {
+ "DBInstanceIdentifier": instance_name,
+ "DBInstanceArn": "arn:aws:rds:us-east-2:123456789012:og:" + instance_name,
+ }
+ ]
+ rds_instance_info.instance_info(conn, instance_name, filters={})
+
+ m_describe_db_instances.assert_called_with(conn, DBInstanceIdentifier=instance_name)
+ m_get_instance_tags.assert_called_with(conn, arn="arn:aws:rds:us-east-2:123456789012:og:" + instance_name)
+
+
+@patch(mod_name + "._describe_db_instances")
+@patch(mod_name + ".get_instance_tags")
+def test_instance_info_all_instances(m_get_instance_tags, m_describe_db_instances):
+ conn = MagicMock()
+ m_get_instance_tags.return_value = []
+ m_describe_db_instances.return_value = [
+ {
+ "DBInstanceIdentifier": "first-instance",
+ "DBInstanceArn": "arn:aws:rds:us-east-2:123456789012:og:first-instance",
+ },
+ {
+ "DBInstanceIdentifier": "second-instance",
+ "DBInstanceArn": "arn:aws:rds:us-east-2:123456789012:og:second-instance",
+ },
+ ]
+ rds_instance_info.instance_info(conn, instance_name=None, filters={"engine": "postgres"})
+
+ m_describe_db_instances.assert_called_with(conn, Filters=[{"Name": "engine", "Values": ["postgres"]}])
+ assert m_get_instance_tags.call_count == 2
+ m_get_instance_tags.assert_has_calls(
+ [
+ call(conn, arn="arn:aws:rds:us-east-2:123456789012:og:first-instance"),
+ call(conn, arn="arn:aws:rds:us-east-2:123456789012:og:second-instance"),
+ ]
+ )
+
+
+def test_get_instance_tags():
+ conn = MagicMock()
+ conn.list_tags_for_resource.return_value = {
+ "TagList": [
+ {"Key": "My-tag", "Value": "the-value$"},
+ ],
+ "NextToken": "some-token",
+ }
+
+ tags = rds_instance_info.get_instance_tags(conn, "arn:aws:rds:us-east-2:123456789012:og:second-instance")
+ conn.list_tags_for_resource.assert_called_with(
+ ResourceName="arn:aws:rds:us-east-2:123456789012:og:second-instance",
+ aws_retry=True,
+ )
+ assert tags == {"My-tag": "the-value$"}
+
+
+def test_api_failure_get_tag():
+ conn = MagicMock()
+ conn.list_tags_for_resource.side_effect = a_boto_exception()
+
+ with pytest.raises(rds_instance_info.RdsInstanceInfoFailure):
+ rds_instance_info.get_instance_tags(conn, "arn:blabla")
+
+
+def test_api_failure_describe():
+ conn = MagicMock()
+ conn.get_paginator.side_effect = a_boto_exception()
+
+ with pytest.raises(rds_instance_info.RdsInstanceInfoFailure):
+ rds_instance_info.instance_info(conn, None, {})
+
+
+@patch(mod_name + ".AnsibleAWSModule")
+def test_main_success(m_AnsibleAWSModule):
+ m_module = MagicMock()
+ m_AnsibleAWSModule.return_value = m_module
+
+ rds_instance_info.main()
+
+ m_module.client.assert_called_with("rds", retry_decorator=ANY)
+ m_module.exit_json.assert_called_with(changed=False, instances=[])
+
+
+@patch(mod_name + "._describe_db_instances")
+@patch(mod_name + ".AnsibleAWSModule")
+def test_main_failure(m_AnsibleAWSModule, m_describe_db_instances):
+ m_module = MagicMock()
+ m_AnsibleAWSModule.return_value = m_module
+ m_describe_db_instances.side_effect = a_boto_exception()
+
+ rds_instance_info.main()
+
+ m_module.client.assert_called_with("rds", retry_decorator=ANY)
+ m_module.fail_json_aws.assert_called_with(ANY, "Couldn't get instance information")
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_s3_object.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_s3_object.py
index b02513072..deeb1c4a0 100644
--- a/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_s3_object.py
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/test_s3_object.py
@@ -1,29 +1,156 @@
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+# (c) 2022 Red Hat Inc.
-from ansible.module_utils.six.moves.urllib.parse import urlparse
+# This file is part of Ansible
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from unittest.mock import MagicMock
+from unittest.mock import patch
+
+import botocore.exceptions
+import pytest
from ansible_collections.amazon.aws.plugins.modules import s3_object
+module_name = "ansible_collections.amazon.aws.plugins.modules.s3_object"
+utils = "ansible_collections.amazon.aws.plugins.module_utils.ec2"
+
+
+@patch(module_name + ".paginated_list")
+def test_list_keys_success(m_paginated_list):
+ s3 = MagicMock()
+
+ m_paginated_list.return_value = ["delete.txt"]
+
+ assert ["delete.txt"] == s3_object.list_keys(s3, "a987e6b6026ab04e4717", "", "", 1000)
+ m_paginated_list.assert_called_once()
+
+
+@patch(module_name + ".paginated_list")
+def test_list_keys_failure(m_paginated_list):
+ s3 = MagicMock()
+
+ m_paginated_list.side_effect = botocore.exceptions.BotoCoreError
+
+ with pytest.raises(s3_object.S3ObjectFailure):
+ s3_object.list_keys(s3, "a987e6b6026ab04e4717", "", "", 1000)
+
+
+@patch(module_name + ".delete_key")
+def test_s3_object_do_delobj_success(m_delete_key):
+ module = MagicMock()
+ s3 = MagicMock()
+ var_dict = {
+ "object": "/usr/local/myfile.txt",
+ "bucket": "a987e6b6026ab04e4717",
+ }
+ s3_object.s3_object_do_delobj(module, s3, s3, var_dict)
+ assert m_delete_key.call_count == 1
+ module.exit_json.assert_called_with(msg="Object deleted from bucket a987e6b6026ab04e4717.", changed=True)
+
+
+@patch(module_name + ".delete_key")
+def test_s3_object_do_delobj_failure_nobucket(m_delete_key):
+ module = MagicMock()
+ s3 = MagicMock()
+
+ var_dict = {"object": "/usr/local/myfile.txt", "bucket": ""}
+ s3_object.s3_object_do_delobj(module, s3, s3, var_dict)
+ assert m_delete_key.call_count == 0
+ module.fail_json.assert_called_with(msg="Bucket parameter is required.")
+
+
+@patch(module_name + ".delete_key")
+def test_s3_object_do_delobj_failure_noobj(m_delete_key):
+ module = MagicMock()
+ s3 = MagicMock()
+ var_dict = {"bucket": "a987e6b6026ab04e4717", "object": ""}
+ s3_object.s3_object_do_delobj(module, s3, s3, var_dict)
+ assert m_delete_key.call_count == 0
+ module.fail_json.assert_called_with(msg="object parameter is required")
+
+
+@patch(module_name + ".paginated_list")
+@patch(module_name + ".list_keys")
+def test_s3_object_do_list_success(m_paginated_list, m_list_keys):
+ module = MagicMock()
+ s3 = MagicMock()
+
+ m_paginated_list.return_value = ["delete.txt"]
+ var_dict = {
+ "bucket": "a987e6b6026ab04e4717",
+ "prefix": "",
+ "marker": "",
+ "max_keys": 1000,
+ "bucketrtn": True,
+ }
+
+ s3_object.s3_object_do_list(module, s3, s3, var_dict)
+ assert m_paginated_list.call_count == 1
+ # assert m_list_keys.call_count == 1
+ # module.exit_json.assert_called_with(msg="LIST operation complete", s3_keys=['delete.txt'])
+
+
+@patch(utils + ".get_aws_connection_info")
+def test_populate_params(m_get_aws_connection_info):
+ module = MagicMock()
+ m_get_aws_connection_info.return_value = (
+ "us-east-1",
+ None,
+ {
+ "aws_access_key_id": "xxxx",
+ "aws_secret_access_key": "yyyy",
+ "aws_session_token": "zzzz",
+ "verify": True,
+ },
+ )
-class TestUrlparse():
+ module.params = {
+ "bucket": "4a6cfe3c17b798613fa77b462e402984",
+ "ceph": False,
+ "content": None,
+ "content_base64": None,
+ "copy_src": None,
+ "debug_botocore_endpoint_logs": True,
+ "dest": None,
+ "dualstack": False,
+ "encrypt": True,
+ "encryption_kms_key_id": None,
+ "encryption_mode": "AES256",
+ "endpoint_url": None,
+ "expiry": 600,
+ "headers": None,
+ "ignore_nonexistent_bucket": False,
+ "marker": "",
+ "max_keys": 1000,
+ "metadata": None,
+ "mode": "create",
+ "object": None,
+ "overwrite": "latest",
+ "permission": ["private"],
+ "prefix": "",
+ "profile": None,
+ "purge_tags": True,
+ "region": "us-east-1",
+ "retries": 0,
+ "sig_v4": True,
+ "src": None,
+ "tags": None,
+ "validate_bucket_name": False,
+ "validate_certs": True,
+ "version": None,
+ }
+ result = s3_object.populate_params(module)
+ for k, v in module.params.items():
+ assert result[k] == v
- def test_urlparse(self):
- actual = urlparse("http://test.com/here")
- assert actual.scheme == "http"
- assert actual.netloc == "test.com"
- assert actual.path == "/here"
+ module.params.update({"object": "example.txt", "mode": "get"})
+ result = s3_object.populate_params(module)
+ assert result["object"] == "example.txt"
- def test_is_fakes3(self):
- actual = s3_object.is_fakes3("fakes3://bla.blubb")
- assert actual is True
+ module.params.update({"object": "/example.txt", "mode": "get"})
+ result = s3_object.populate_params(module)
+ assert result["object"] == "example.txt"
- def test_get_s3_connection(self):
- aws_connect_kwargs = dict(aws_access_key_id="access_key",
- aws_secret_access_key="secret_key")
- location = None
- rgw = True
- s3_url = "http://bla.blubb"
- actual = s3_object.get_s3_connection(None, aws_connect_kwargs, location, rgw, s3_url)
- assert "bla.blubb" in str(actual._endpoint)
+ module.params.update({"object": "example.txt", "mode": "delete"})
+ result = s3_object.populate_params(module)
+ module.fail_json.assert_called_with(msg="Parameter object cannot be used with mode=delete")
diff --git a/ansible_collections/amazon/aws/tests/unit/plugins/modules/utils.py b/ansible_collections/amazon/aws/tests/unit/plugins/modules/utils.py
index 058a5b605..72b3b887e 100644
--- a/ansible_collections/amazon/aws/tests/unit/plugins/modules/utils.py
+++ b/ansible_collections/amazon/aws/tests/unit/plugins/modules/utils.py
@@ -1,21 +1,18 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import json
+import unittest
+from unittest.mock import patch
-from ansible_collections.amazon.aws.tests.unit.compat import unittest
-from ansible_collections.amazon.aws.tests.unit.compat.mock import patch
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
def set_module_args(args):
- if '_ansible_remote_tmp' not in args:
- args['_ansible_remote_tmp'] = '/tmp'
- if '_ansible_keep_remote_files' not in args:
- args['_ansible_keep_remote_files'] = False
+ if "_ansible_remote_tmp" not in args:
+ args["_ansible_remote_tmp"] = "/tmp"
+ if "_ansible_keep_remote_files" not in args:
+ args["_ansible_keep_remote_files"] = False
- args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
+ args = json.dumps({"ANSIBLE_MODULE_ARGS": args})
basic._ANSIBLE_ARGS = to_bytes(args)
@@ -28,22 +25,21 @@ class AnsibleFailJson(Exception):
def exit_json(*args, **kwargs):
- if 'changed' not in kwargs:
- kwargs['changed'] = False
+ if "changed" not in kwargs:
+ kwargs["changed"] = False
raise AnsibleExitJson(kwargs)
def fail_json(*args, **kwargs):
- kwargs['failed'] = True
+ kwargs["failed"] = True
raise AnsibleFailJson(kwargs)
class ModuleTestCase(unittest.TestCase):
-
def setUp(self):
self.mock_module = patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
self.mock_module.start()
- self.mock_sleep = patch('time.sleep')
+ self.mock_sleep = patch("time.sleep")
self.mock_sleep.start()
set_module_args({})
self.addCleanup(self.mock_module.stop)
diff --git a/ansible_collections/amazon/aws/tests/unit/utils/__init__.py b/ansible_collections/amazon/aws/tests/unit/utils/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/amazon/aws/tests/unit/utils/__init__.py
diff --git a/ansible_collections/amazon/aws/tests/unit/utils/amazon_placebo_fixtures.py b/ansible_collections/amazon/aws/tests/unit/utils/amazon_placebo_fixtures.py
index 6912c2e32..afe91adad 100644
--- a/ansible_collections/amazon/aws/tests/unit/utils/amazon_placebo_fixtures.py
+++ b/ansible_collections/amazon/aws/tests/unit/utils/amazon_placebo_fixtures.py
@@ -1,9 +1,13 @@
-from __future__ import absolute_import, division, print_function
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
__metaclass__ = type
import errno
import os
import time
+
import mock
import pytest
@@ -54,19 +58,19 @@ def placeboify(request, monkeypatch):
namespace `placebo_recordings/{testfile name}/{test function name}` to
distinguish them.
"""
- session = boto3.Session(region_name='us-west-2')
+ session = boto3.Session(region_name="us-west-2")
recordings_path = os.path.join(
request.fspath.dirname,
- 'placebo_recordings',
- request.fspath.basename.replace('.py', ''),
+ "placebo_recordings",
+ request.fspath.basename.replace(".py", ""),
request.function.__name__
# remove the test_ prefix from the function & file name
- ).replace('test_', '')
+ ).replace("test_", "")
- if not os.getenv('PLACEBO_RECORD'):
+ if not os.getenv("PLACEBO_RECORD"):
if not os.path.isdir(recordings_path):
- raise NotImplementedError('Missing Placebo recordings in directory: %s' % recordings_path)
+ raise NotImplementedError(f"Missing Placebo recordings in directory: {recordings_path}")
else:
try:
# make sure the directory for placebo test recordings is available
@@ -76,21 +80,22 @@ def placeboify(request, monkeypatch):
raise
pill = placebo.attach(session, data_path=recordings_path)
- if os.getenv('PLACEBO_RECORD'):
+ if os.getenv("PLACEBO_RECORD"):
pill.record()
else:
pill.playback()
- def boto3_middleman_connection(module, conn_type, resource, region='us-west-2', **kwargs):
- if conn_type != 'client':
+ def boto3_middleman_connection(module, conn_type, resource, region="us-west-2", **kwargs):
+ if conn_type != "client":
# TODO support resource-based connections
- raise ValueError('Mocker only supports client, not %s' % conn_type)
+ raise ValueError(f"Mocker only supports client, not {conn_type}")
return session.client(resource, region_name=region)
import ansible_collections.amazon.aws.plugins.module_utils.ec2
+
monkeypatch.setattr(
ansible_collections.amazon.aws.plugins.module_utils.ec2,
- 'boto3_conn',
+ "boto3_conn",
boto3_middleman_connection,
)
yield session
@@ -99,113 +104,114 @@ def placeboify(request, monkeypatch):
pill.stop()
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def basic_launch_config():
"""Create an EC2 launch config whose creation *is not* recorded and return its name
This fixture is module-scoped, since launch configs are immutable and this
can be reused for many tests.
"""
- if not os.getenv('PLACEBO_RECORD'):
- yield 'pytest_basic_lc'
+ if not os.getenv("PLACEBO_RECORD"):
+ yield "pytest_basic_lc"
return
# use a *non recording* session to make the launch config
# since that's a prereq of the ec2_asg module, and isn't what
# we're testing.
- asg = boto3.client('autoscaling')
+ asg = boto3.client("autoscaling")
asg.create_launch_configuration(
- LaunchConfigurationName='pytest_basic_lc',
- ImageId='ami-9be6f38c', # Amazon Linux 2016.09 us-east-1 AMI, can be any valid AMI
+ LaunchConfigurationName="pytest_basic_lc",
+ ImageId="ami-9be6f38c", # Amazon Linux 2016.09 us-east-1 AMI, can be any valid AMI
SecurityGroups=[],
- UserData='#!/bin/bash\necho hello world',
- InstanceType='t2.micro',
- InstanceMonitoring={'Enabled': False},
- AssociatePublicIpAddress=True
+ UserData="#!/bin/bash\necho hello world",
+ InstanceType="t2.micro",
+ InstanceMonitoring={"Enabled": False},
+ AssociatePublicIpAddress=True,
)
- yield 'pytest_basic_lc'
+ yield "pytest_basic_lc"
try:
- asg.delete_launch_configuration(LaunchConfigurationName='pytest_basic_lc')
+ asg.delete_launch_configuration(LaunchConfigurationName="pytest_basic_lc")
except botocore.exceptions.ClientError as e:
- if 'not found' in e.message:
+ if "not found" in e.message:
return
raise
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def scratch_vpc():
- if not os.getenv('PLACEBO_RECORD'):
+ if not os.getenv("PLACEBO_RECORD"):
yield {
- 'vpc_id': 'vpc-123456',
- 'cidr_range': '10.0.0.0/16',
- 'subnets': [
+ "vpc_id": "vpc-123456",
+ "cidr_range": "10.0.0.0/16",
+ "subnets": [
{
- 'id': 'subnet-123456',
- 'az': 'us-east-1d',
+ "id": "subnet-123456",
+ "az": "us-east-1d",
},
{
- 'id': 'subnet-654321',
- 'az': 'us-east-1e',
+ "id": "subnet-654321",
+ "az": "us-east-1e",
},
- ]
+ ],
}
return
# use a *non recording* session to make the base VPC and subnets
- ec2 = boto3.client('ec2')
+ ec2 = boto3.client("ec2")
vpc_resp = ec2.create_vpc(
- CidrBlock='10.0.0.0/16',
+ CidrBlock="10.0.0.0/16",
AmazonProvidedIpv6CidrBlock=False,
)
subnets = (
ec2.create_subnet(
- VpcId=vpc_resp['Vpc']['VpcId'],
- CidrBlock='10.0.0.0/24',
+ VpcId=vpc_resp["Vpc"]["VpcId"],
+ CidrBlock="10.0.0.0/24",
),
ec2.create_subnet(
- VpcId=vpc_resp['Vpc']['VpcId'],
- CidrBlock='10.0.1.0/24',
- )
+ VpcId=vpc_resp["Vpc"]["VpcId"],
+ CidrBlock="10.0.1.0/24",
+ ),
)
time.sleep(3)
yield {
- 'vpc_id': vpc_resp['Vpc']['VpcId'],
- 'cidr_range': '10.0.0.0/16',
- 'subnets': [
+ "vpc_id": vpc_resp["Vpc"]["VpcId"],
+ "cidr_range": "10.0.0.0/16",
+ "subnets": [
{
- 'id': s['Subnet']['SubnetId'],
- 'az': s['Subnet']['AvailabilityZone'],
- } for s in subnets
- ]
+ "id": s["Subnet"]["SubnetId"],
+ "az": s["Subnet"]["AvailabilityZone"],
+ }
+ for s in subnets
+ ],
}
try:
for s in subnets:
try:
- ec2.delete_subnet(SubnetId=s['Subnet']['SubnetId'])
+ ec2.delete_subnet(SubnetId=s["Subnet"]["SubnetId"])
except botocore.exceptions.ClientError as e:
- if 'not found' in e.message:
+ if "not found" in e.message:
continue
raise
- ec2.delete_vpc(VpcId=vpc_resp['Vpc']['VpcId'])
+ ec2.delete_vpc(VpcId=vpc_resp["Vpc"]["VpcId"])
except botocore.exceptions.ClientError as e:
- if 'not found' in e.message:
+ if "not found" in e.message:
return
raise
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def maybe_sleep():
"""If placebo is reading saved sessions, make sleep always take 0 seconds.
AWS modules often perform polling or retries, but when using recorded
sessions there's no reason to wait. We can still exercise retry and other
code paths without waiting for wall-clock time to pass."""
- if not os.getenv('PLACEBO_RECORD'):
- p = mock.patch('time.sleep', return_value=None)
+ if not os.getenv("PLACEBO_RECORD"):
+ p = mock.patch("time.sleep", return_value=None)
p.start()
yield
p.stop()