Diffstat (limited to 'test/units')
-rw-r--r-- test/units/_vendor/test_vendor.py | 19
-rw-r--r-- test/units/ansible_test/diff/add_binary_file.diff | 4
-rw-r--r-- test/units/ansible_test/diff/add_text_file.diff | 8
-rw-r--r-- test/units/ansible_test/diff/add_trailing_newline.diff | 9
-rw-r--r-- test/units/ansible_test/diff/add_two_text_files.diff | 16
-rw-r--r-- test/units/ansible_test/diff/context_no_trailing_newline.diff | 8
-rw-r--r-- test/units/ansible_test/diff/multiple_context_lines.diff | 10
-rw-r--r-- test/units/ansible_test/diff/parse_delete.diff | 16
-rw-r--r-- test/units/ansible_test/diff/parse_rename.diff | 8
-rw-r--r-- test/units/ansible_test/diff/remove_trailing_newline.diff | 9
-rw-r--r-- test/units/ansible_test/test_diff.py | 178
-rw-r--r-- test/units/ansible_test/test_validate_modules.py | 63
-rw-r--r-- test/units/cli/arguments/test_optparse_helpers.py | 5
-rw-r--r-- test/units/cli/galaxy/test_execute_list_collection.py | 152
-rw-r--r-- test/units/cli/test_adhoc.py | 10
-rw-r--r-- test/units/cli/test_data/collection_skeleton/README.md | 2
-rw-r--r-- test/units/cli/test_data/collection_skeleton/docs/My Collection.md | 2
-rw-r--r-- test/units/cli/test_doc.py | 3
-rw-r--r-- test/units/cli/test_galaxy.py | 110
-rw-r--r-- test/units/cli/test_vault.py | 23
-rw-r--r-- test/units/compat/mock.py | 2
-rw-r--r-- test/units/config/manager/test_find_ini_config_file.py | 67
-rw-r--r-- test/units/config/test3.cfg | 4
-rw-r--r-- test/units/config/test_manager.py | 30
-rw-r--r-- test/units/executor/module_common/conftest.py | 10
-rw-r--r-- test/units/executor/module_common/test_modify_module.py | 8
-rw-r--r-- test/units/executor/module_common/test_module_common.py | 39
-rw-r--r-- test/units/executor/module_common/test_recursive_finder.py | 5
-rw-r--r-- test/units/executor/test_interpreter_discovery.py | 8
-rw-r--r-- test/units/executor/test_play_iterator.py | 24
-rw-r--r-- test/units/executor/test_task_executor.py | 55
-rw-r--r-- test/units/galaxy/test_api.py | 37
-rw-r--r-- test/units/galaxy/test_collection.py | 161
-rw-r--r-- test/units/galaxy/test_collection_install.py | 156
-rw-r--r-- test/units/galaxy/test_role_install.py | 21
-rw-r--r-- test/units/galaxy/test_token.py | 2
-rw-r--r-- test/units/inventory/test_host.py | 8
-rw-r--r-- test/units/mock/loader.py | 30
-rw-r--r-- test/units/mock/procenv.py | 27
-rw-r--r-- test/units/mock/vault_helper.py | 2
-rw-r--r-- test/units/mock/yaml_helper.py | 73
-rw-r--r-- test/units/module_utils/basic/test__symbolic_mode_to_octal.py | 8
-rw-r--r-- test/units/module_utils/basic/test_argument_spec.py | 2
-rw-r--r-- test/units/module_utils/basic/test_command_nonexisting.py | 5
-rw-r--r-- test/units/module_utils/basic/test_filesystem.py | 2
-rw-r--r-- test/units/module_utils/basic/test_get_available_hash_algorithms.py | 60
-rw-r--r-- test/units/module_utils/basic/test_run_command.py | 10
-rw-r--r-- test/units/module_utils/basic/test_safe_eval.py | 2
-rw-r--r-- test/units/module_utils/basic/test_sanitize_keys.py | 1
-rw-r--r-- test/units/module_utils/basic/test_selinux.py | 82
-rw-r--r-- test/units/module_utils/basic/test_set_cwd.py | 7
-rw-r--r-- test/units/module_utils/basic/test_tmpdir.py | 2
-rw-r--r-- test/units/module_utils/common/arg_spec/test_aliases.py | 1
-rw-r--r-- test/units/module_utils/common/parameters/test_handle_aliases.py | 2
-rw-r--r-- test/units/module_utils/common/parameters/test_list_deprecations.py | 11
-rw-r--r-- test/units/module_utils/common/test_collections.py | 21
-rw-r--r-- test/units/module_utils/common/text/converters/test_json_encode_fallback.py | 6
-rw-r--r-- test/units/module_utils/common/validation/test_check_missing_parameters.py | 8
-rw-r--r-- test/units/module_utils/common/validation/test_check_mutually_exclusive.py | 2
-rw-r--r-- test/units/module_utils/common/validation/test_check_required_arguments.py | 2
-rw-r--r-- test/units/module_utils/common/validation/test_check_required_by.py | 2
-rw-r--r-- test/units/module_utils/common/validation/test_check_required_if.py | 2
-rw-r--r-- test/units/module_utils/common/validation/test_check_required_one_of.py | 2
-rw-r--r-- test/units/module_utils/common/validation/test_check_required_together.py | 2
-rw-r--r-- test/units/module_utils/common/validation/test_check_type_bits.py | 2
-rw-r--r-- test/units/module_utils/common/validation/test_check_type_bool.py | 2
-rw-r--r-- test/units/module_utils/common/validation/test_check_type_bytes.py | 2
-rw-r--r-- test/units/module_utils/common/validation/test_check_type_float.py | 2
-rw-r--r-- test/units/module_utils/common/validation/test_check_type_int.py | 2
-rw-r--r-- test/units/module_utils/common/validation/test_check_type_jsonarg.py | 2
-rw-r--r-- test/units/module_utils/common/validation/test_check_type_str.py | 2
-rw-r--r-- test/units/module_utils/compat/__init__.py | 0
-rw-r--r-- test/units/module_utils/compat/test_datetime.py | 34
-rw-r--r-- test/units/module_utils/conftest.py | 4
-rw-r--r-- test/units/module_utils/facts/base.py | 4
-rw-r--r-- test/units/module_utils/facts/fixtures/cpuinfo/s390x-z13-2cpu-cpuinfo | 14
-rw-r--r-- test/units/module_utils/facts/fixtures/cpuinfo/s390x-z14-64cpu-cpuinfo | 1037
-rw-r--r-- test/units/module_utils/facts/hardware/linux_data.py | 62
-rw-r--r-- test/units/module_utils/facts/hardware/test_linux_get_cpu_info.py | 4
-rw-r--r-- test/units/module_utils/facts/network/test_locally_reachable_ips.py | 93
-rw-r--r-- test/units/module_utils/facts/system/distribution/test_parse_distribution_file_ClearLinux.py | 6
-rw-r--r-- test/units/module_utils/facts/system/distribution/test_parse_distribution_file_Slackware.py | 5
-rw-r--r-- test/units/module_utils/facts/system/test_pkg_mgr.py | 75
-rw-r--r-- test/units/module_utils/facts/test_collectors.py | 5
-rw-r--r-- test/units/module_utils/facts/test_date_time.py | 15
-rw-r--r-- test/units/module_utils/facts/test_sysctl.py | 6
-rw-r--r-- test/units/module_utils/facts/test_timeout.py | 2
-rw-r--r-- test/units/module_utils/test_text.py | 21
-rw-r--r-- test/units/module_utils/urls/test_Request.py | 14
-rw-r--r-- test/units/module_utils/urls/test_fetch_file.py | 1
-rw-r--r-- test/units/module_utils/urls/test_prepare_multipart.py | 2
-rw-r--r-- test/units/module_utils/urls/test_urls.py | 2
-rw-r--r-- test/units/modules/conftest.py | 21
-rw-r--r-- test/units/modules/test_apt.py | 29
-rw-r--r-- test/units/modules/test_async_wrapper.py | 9
-rw-r--r-- test/units/modules/test_copy.py | 23
-rw-r--r-- test/units/modules/test_hostname.py | 10
-rw-r--r-- test/units/modules/test_iptables.py | 40
-rw-r--r-- test/units/modules/test_known_hosts.py | 2
-rw-r--r-- test/units/modules/test_unarchive.py | 20
-rw-r--r-- test/units/modules/utils.py | 10
-rw-r--r-- test/units/parsing/test_ajson.py | 6
-rw-r--r-- test/units/parsing/test_dataloader.py | 13
-rw-r--r-- test/units/parsing/test_mod_args.py | 10
-rw-r--r-- test/units/parsing/test_splitter.py | 75
-rw-r--r-- test/units/parsing/vault/test_vault.py | 43
-rw-r--r-- test/units/parsing/vault/test_vault_editor.py | 79
-rw-r--r-- test/units/parsing/yaml/test_dumper.py | 21
-rw-r--r-- test/units/parsing/yaml/test_objects.py | 7
-rw-r--r-- test/units/playbook/role/test_include_role.py | 6
-rw-r--r-- test/units/playbook/role/test_role.py | 77
-rw-r--r-- test/units/playbook/test_base.py | 20
-rw-r--r-- test/units/playbook/test_collectionsearch.py | 1
-rw-r--r-- test/units/playbook/test_helpers.py | 62
-rw-r--r-- test/units/playbook/test_included_file.py | 14
-rw-r--r-- test/units/playbook/test_play_context.py | 2
-rw-r--r-- test/units/playbook/test_taggable.py | 1
-rw-r--r-- test/units/playbook/test_task.py | 2
-rw-r--r-- test/units/plugins/action/test_action.py | 57
-rw-r--r-- test/units/plugins/action/test_raw.py | 6
-rw-r--r-- test/units/plugins/cache/test_cache.py | 5
-rw-r--r-- test/units/plugins/connection/test_connection.py | 75
-rw-r--r-- test/units/plugins/connection/test_local.py | 1
-rw-r--r-- test/units/plugins/connection/test_paramiko_ssh.py (renamed from test/units/plugins/connection/test_paramiko.py) | 14
-rw-r--r-- test/units/plugins/connection/test_ssh.py | 18
-rw-r--r-- test/units/plugins/connection/test_winrm.py | 104
-rw-r--r-- test/units/plugins/filter/test_core.py | 4
-rw-r--r-- test/units/plugins/filter/test_mathstuff.py | 85
-rw-r--r-- test/units/plugins/inventory/test_constructed.py | 10
-rw-r--r-- test/units/plugins/inventory/test_inventory.py | 2
-rw-r--r-- test/units/plugins/inventory/test_script.py | 10
-rw-r--r-- test/units/plugins/lookup/test_password.py | 30
-rw-r--r-- test/units/plugins/strategy/test_strategy.py | 492
-rw-r--r-- test/units/plugins/test_plugins.py | 10
-rw-r--r-- test/units/requirements.txt | 8
-rw-r--r-- test/units/template/test_templar.py | 14
-rw-r--r-- test/units/template/test_vars.py | 23
-rw-r--r-- test/units/test_constants.py | 94
-rw-r--r-- test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py | 2
-rw-r--r-- test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_other_util.py | 2
-rw-r--r-- test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll2/__init__.py | 0
-rw-r--r-- test/units/utils/collection_loader/test_collection_loader.py | 71
-rw-r--r-- test/units/utils/display/test_broken_cowsay.py | 7
-rw-r--r-- test/units/utils/display/test_curses.py (renamed from test/units/plugins/action/test_pause.py) | 30
-rw-r--r-- test/units/utils/test_cleanup_tmp_file.py | 25
-rw-r--r-- test/units/utils/test_display.py | 35
-rw-r--r-- test/units/utils/test_encrypt.py | 13
-rw-r--r-- test/units/utils/test_unsafe_proxy.py | 28
-rw-r--r-- test/units/vars/test_module_response_deepcopy.py | 11
-rw-r--r-- test/units/vars/test_variable_manager.py | 6
150 files changed, 2915 insertions(+), 2066 deletions(-)
diff --git a/test/units/_vendor/test_vendor.py b/test/units/_vendor/test_vendor.py
index 84b850e..265f5b2 100644
--- a/test/units/_vendor/test_vendor.py
+++ b/test/units/_vendor/test_vendor.py
@@ -1,27 +1,22 @@
# (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import os
import pkgutil
import pytest
import sys
-from unittest.mock import MagicMock, NonCallableMagicMock, patch
+from unittest.mock import patch
def reset_internal_vendor_package():
import ansible
ansible_vendor_path = os.path.join(os.path.dirname(ansible.__file__), '_vendor')
- if ansible_vendor_path in sys.path:
- sys.path.remove(ansible_vendor_path)
+ list(map(sys.path.remove, [path for path in sys.path if path == ansible_vendor_path]))
for pkg in ['ansible._vendor', 'ansible']:
- if pkg in sys.modules:
- del sys.modules[pkg]
+ sys.modules.pop(pkg, None)
def test_package_path_masking():
@@ -50,16 +45,10 @@ def test_vendored(vendored_pkg_names=None):
import ansible
ansible_vendor_path = os.path.join(os.path.dirname(ansible.__file__), '_vendor')
assert sys.path[0] == ansible_vendor_path
-
- if ansible_vendor_path in previous_path:
- previous_path.remove(ansible_vendor_path)
-
assert sys.path[1:] == previous_path
def test_vendored_conflict():
with pytest.warns(UserWarning) as w:
- import pkgutil
- import sys
test_vendored(vendored_pkg_names=['sys', 'pkgutil']) # pass a real package we know is already loaded
- assert any('pkgutil, sys' in str(msg.message) for msg in w) # ensure both conflicting modules are listed and sorted
+ assert any(list('pkgutil, sys' in str(msg.message) for msg in w)) # ensure both conflicting modules are listed and sorted
diff --git a/test/units/ansible_test/diff/add_binary_file.diff b/test/units/ansible_test/diff/add_binary_file.diff
new file mode 100644
index 0000000..ef8f362
--- /dev/null
+++ b/test/units/ansible_test/diff/add_binary_file.diff
@@ -0,0 +1,4 @@
+diff --git a/binary.dat b/binary.dat
+new file mode 100644
+index 0000000000..f76dd238ad
+Binary files /dev/null and b/binary.dat differ
diff --git a/test/units/ansible_test/diff/add_text_file.diff b/test/units/ansible_test/diff/add_text_file.diff
new file mode 100644
index 0000000..068d013
--- /dev/null
+++ b/test/units/ansible_test/diff/add_text_file.diff
@@ -0,0 +1,8 @@
+diff --git a/test.txt b/test.txt
+new file mode 100644
+index 0000000000..814f4a4229
+--- /dev/null
++++ b/test.txt
+@@ -0,0 +1,2 @@
++one
++two
diff --git a/test/units/ansible_test/diff/add_trailing_newline.diff b/test/units/ansible_test/diff/add_trailing_newline.diff
new file mode 100644
index 0000000..d83df60
--- /dev/null
+++ b/test/units/ansible_test/diff/add_trailing_newline.diff
@@ -0,0 +1,9 @@
+diff --git a/test.txt b/test.txt
+index 9ed40b4425..814f4a4229 100644
+--- a/test.txt
++++ b/test.txt
+@@ -1,2 +1,2 @@
+ one
+-two
+\ No newline at end of file
++two
diff --git a/test/units/ansible_test/diff/add_two_text_files.diff b/test/units/ansible_test/diff/add_two_text_files.diff
new file mode 100644
index 0000000..f0c8fb0
--- /dev/null
+++ b/test/units/ansible_test/diff/add_two_text_files.diff
@@ -0,0 +1,16 @@
+diff --git a/one.txt b/one.txt
+new file mode 100644
+index 0000000000..99b976670b
+--- /dev/null
++++ b/one.txt
+@@ -0,0 +1,2 @@
++One
++1
+diff --git a/two.txt b/two.txt
+new file mode 100644
+index 0000000000..da06cc0974
+--- /dev/null
++++ b/two.txt
+@@ -0,0 +1,2 @@
++Two
++2
diff --git a/test/units/ansible_test/diff/context_no_trailing_newline.diff b/test/units/ansible_test/diff/context_no_trailing_newline.diff
new file mode 100644
index 0000000..519d635
--- /dev/null
+++ b/test/units/ansible_test/diff/context_no_trailing_newline.diff
@@ -0,0 +1,8 @@
+diff --git a/test.txt b/test.txt
+index 9ed40b4425..64c5e5885a 100644
+--- a/test.txt
++++ b/test.txt
+@@ -1,2 +1 @@
+-one
+ two
+\ No newline at end of file
diff --git a/test/units/ansible_test/diff/multiple_context_lines.diff b/test/units/ansible_test/diff/multiple_context_lines.diff
new file mode 100644
index 0000000..fd98b7a
--- /dev/null
+++ b/test/units/ansible_test/diff/multiple_context_lines.diff
@@ -0,0 +1,10 @@
+diff --git a/test.txt b/test.txt
+index 949a655cb3..08c59a7cf1 100644
+--- a/test.txt
++++ b/test.txt
+@@ -1,5 +1,3 @@
+ One
+-Two
+ Three
+-Four
+ Five
diff --git a/test/units/ansible_test/diff/parse_delete.diff b/test/units/ansible_test/diff/parse_delete.diff
new file mode 100644
index 0000000..866d43c
--- /dev/null
+++ b/test/units/ansible_test/diff/parse_delete.diff
@@ -0,0 +1,16 @@
+diff --git a/changelogs/fragments/79263-runme-sh-logging-3cb482385bd59058.yaml b/changelogs/fragments/79263-runme-sh-logging-3cb482385bd59058.yaml
+deleted file mode 100644
+index a5bc88ffe3..0000000000
+--- a/changelogs/fragments/79263-runme-sh-logging-3cb482385bd59058.yaml
++++ /dev/null
+@@ -1,10 +0,0 @@
+----
+-
+-trivial:
+- - >-
+- integration tests — added command invocation logging via ``set -x``
+- to ``runme.sh`` scripts where it was missing and improved failing
+- fast in those scripts that use pipes (via ``set -o pipefail``).
+- See `PR #79263` https://github.com/ansible/ansible/pull/79263>`__.
+-
+-...
diff --git a/test/units/ansible_test/diff/parse_rename.diff b/test/units/ansible_test/diff/parse_rename.diff
new file mode 100644
index 0000000..5456372
--- /dev/null
+++ b/test/units/ansible_test/diff/parse_rename.diff
@@ -0,0 +1,8 @@
+diff --git a/packaging/debian/ansible-base.dirs b/packaging/debian/ansible-core.dirs
+similarity index 100%
+rename from packaging/debian/ansible-base.dirs
+rename to packaging/debian/ansible-core.dirs
+diff --git a/packaging/debian/ansible-base.install b/packaging/debian/ansible-core.install
+similarity index 100%
+rename from packaging/debian/ansible-base.install
+rename to packaging/debian/ansible-core.install
diff --git a/test/units/ansible_test/diff/remove_trailing_newline.diff b/test/units/ansible_test/diff/remove_trailing_newline.diff
new file mode 100644
index 0000000..c0750ae
--- /dev/null
+++ b/test/units/ansible_test/diff/remove_trailing_newline.diff
@@ -0,0 +1,9 @@
+diff --git a/test.txt b/test.txt
+index 814f4a4229..9ed40b4425 100644
+--- a/test.txt
++++ b/test.txt
+@@ -1,2 +1,2 @@
+ one
+-two
++two
+\ No newline at end of file
diff --git a/test/units/ansible_test/test_diff.py b/test/units/ansible_test/test_diff.py
new file mode 100644
index 0000000..26ef522
--- /dev/null
+++ b/test/units/ansible_test/test_diff.py
@@ -0,0 +1,178 @@
+"""Tests for the diff module."""
+from __future__ import annotations
+
+import pathlib
+import pytest
+import typing as t
+
+if t.TYPE_CHECKING: # pragma: no cover
+ # noinspection PyProtectedMember
+ from ansible_test._internal.diff import FileDiff
+
+
+@pytest.fixture()
+def diffs(request: pytest.FixtureRequest) -> list[FileDiff]:
+ """A fixture which returns the parsed diff associated with the current test."""
+ return get_parsed_diff(request.node.name.removeprefix('test_'))
+
+
+def get_parsed_diff(name: str) -> list[FileDiff]:
+ """Parse and return the named git diff."""
+ cache = pathlib.Path(__file__).parent / 'diff' / f'{name}.diff'
+ content = cache.read_text()
+ lines = content.splitlines()
+
+ assert lines
+
+ # noinspection PyProtectedMember
+ from ansible_test._internal.diff import parse_diff
+
+ diffs = parse_diff(lines)
+
+ assert diffs
+
+ for item in diffs:
+ assert item.headers
+ assert item.is_complete
+
+ item.old.format_lines()
+ item.new.format_lines()
+
+ for line_range in item.old.ranges:
+ assert line_range[1] >= line_range[0] > 0
+
+ for line_range in item.new.ranges:
+ assert line_range[1] >= line_range[0] > 0
+
+ return diffs
+
+
+def test_add_binary_file(diffs: list[FileDiff]) -> None:
+ """Add a binary file."""
+ assert len(diffs) == 1
+
+ assert diffs[0].old.exists
+ assert diffs[0].new.exists
+
+ assert diffs[0].old.path == 'binary.dat'
+ assert diffs[0].new.path == 'binary.dat'
+
+ assert diffs[0].old.eof_newline
+ assert diffs[0].new.eof_newline
+
+
+def test_add_text_file(diffs: list[FileDiff]) -> None:
+ """Add a new file."""
+ assert len(diffs) == 1
+
+ assert not diffs[0].old.exists
+ assert diffs[0].new.exists
+
+ assert diffs[0].old.path == 'test.txt'
+ assert diffs[0].new.path == 'test.txt'
+
+ assert diffs[0].old.eof_newline
+ assert diffs[0].new.eof_newline
+
+
+def test_remove_trailing_newline(diffs: list[FileDiff]) -> None:
+ """Remove the trailing newline from a file."""
+ assert len(diffs) == 1
+
+ assert diffs[0].old.exists
+ assert diffs[0].new.exists
+
+ assert diffs[0].old.path == 'test.txt'
+ assert diffs[0].new.path == 'test.txt'
+
+ assert diffs[0].old.eof_newline
+ assert not diffs[0].new.eof_newline
+
+
+def test_add_trailing_newline(diffs: list[FileDiff]) -> None:
+ """Add a trailing newline to a file."""
+ assert len(diffs) == 1
+
+ assert diffs[0].old.exists
+ assert diffs[0].new.exists
+
+ assert diffs[0].old.path == 'test.txt'
+ assert diffs[0].new.path == 'test.txt'
+
+ assert not diffs[0].old.eof_newline
+ assert diffs[0].new.eof_newline
+
+
+def test_add_two_text_files(diffs: list[FileDiff]) -> None:
+ """Add two text files."""
+ assert len(diffs) == 2
+
+ assert not diffs[0].old.exists
+ assert diffs[0].new.exists
+
+ assert diffs[0].old.path == 'one.txt'
+ assert diffs[0].new.path == 'one.txt'
+
+ assert diffs[0].old.eof_newline
+ assert diffs[0].new.eof_newline
+
+ assert not diffs[1].old.exists
+ assert diffs[1].new.exists
+
+ assert diffs[1].old.path == 'two.txt'
+ assert diffs[1].new.path == 'two.txt'
+
+ assert diffs[1].old.eof_newline
+ assert diffs[1].new.eof_newline
+
+
+def test_context_no_trailing_newline(diffs: list[FileDiff]) -> None:
+ """Context without a trailing newline."""
+ assert len(diffs) == 1
+
+ assert diffs[0].old.exists
+ assert diffs[0].new.exists
+
+ assert diffs[0].old.path == 'test.txt'
+ assert diffs[0].new.path == 'test.txt'
+
+ assert not diffs[0].old.eof_newline
+ assert not diffs[0].new.eof_newline
+
+
+def test_multiple_context_lines(diffs: list[FileDiff]) -> None:
+ """Multiple context lines."""
+ assert len(diffs) == 1
+
+ assert diffs[0].old.exists
+ assert diffs[0].new.exists
+
+ assert diffs[0].old.path == 'test.txt'
+ assert diffs[0].new.path == 'test.txt'
+
+ assert diffs[0].old.eof_newline
+ assert diffs[0].new.eof_newline
+
+
+def test_parse_delete(diffs: list[FileDiff]) -> None:
+ """Delete files."""
+ assert len(diffs) == 1
+
+ assert diffs[0].old.exists
+ assert not diffs[0].new.exists
+
+ assert diffs[0].old.path == 'changelogs/fragments/79263-runme-sh-logging-3cb482385bd59058.yaml'
+ assert diffs[0].new.path == 'changelogs/fragments/79263-runme-sh-logging-3cb482385bd59058.yaml'
+
+
+def test_parse_rename(diffs) -> None:
+ """Rename files."""
+ assert len(diffs) == 2
+
+ assert all(item.old.path != item.new.path and item.old.exists and item.new.exists for item in diffs)
+
+ assert diffs[0].old.path == 'packaging/debian/ansible-base.dirs'
+ assert diffs[0].new.path == 'packaging/debian/ansible-core.dirs'
+
+ assert diffs[1].old.path == 'packaging/debian/ansible-base.install'
+ assert diffs[1].new.path == 'packaging/debian/ansible-core.install'
diff --git a/test/units/ansible_test/test_validate_modules.py b/test/units/ansible_test/test_validate_modules.py
new file mode 100644
index 0000000..1b801a5
--- /dev/null
+++ b/test/units/ansible_test/test_validate_modules.py
@@ -0,0 +1,63 @@
+"""Tests for validate-modules regexes."""
+from __future__ import annotations
+
+import pathlib
+import sys
+from unittest import mock
+
+import pytest
+
+
+@pytest.fixture(autouse=True, scope='session')
+def validate_modules() -> None:
+ """Make validate_modules available on sys.path for unit testing."""
+ sys.path.insert(0, str(pathlib.Path(__file__).parent.parent.parent / 'lib/ansible_test/_util/controller/sanity/validate-modules'))
+
+ # Mock out voluptuous to facilitate testing without it, since tests aren't covering anything that uses it.
+
+ sys.modules['voluptuous'] = voluptuous = mock.MagicMock()
+ sys.modules['voluptuous.humanize'] = voluptuous.humanize = mock.MagicMock()
+
+ # Mock out antsibull_docs_parser to facilitate testing without it, since tests aren't covering anything that uses it.
+
+ sys.modules['antsibull_docs_parser'] = antsibull_docs_parser = mock.MagicMock()
+ sys.modules['antsibull_docs_parser.parser'] = antsibull_docs_parser.parser = mock.MagicMock()
+
+
+@pytest.mark.parametrize('cstring,cexpected', [
+ ['if type(foo) is Bar', True],
+ ['if Bar is type(foo)', True],
+ ['if type(foo) is not Bar', True],
+ ['if Bar is not type(foo)', True],
+ ['if type(foo) == Bar', True],
+ ['if Bar == type(foo)', True],
+ ['if type(foo)==Bar', True],
+ ['if Bar==type(foo)', True],
+ ['if type(foo) != Bar', True],
+ ['if Bar != type(foo)', True],
+ ['if type(foo)!=Bar', True],
+ ['if Bar!=type(foo)', True],
+ ['if foo or type(bar) != Bar', True],
+ ['x = type(foo)', False],
+ ["error = err.message + ' ' + str(err) + ' - ' + str(type(err))", False],
+ # cloud/amazon/ec2_group.py
+ ["module.fail_json(msg='Invalid rule parameter type [%s].' % type(rule))", False],
+ # files/patch.py
+ ["p = type('Params', (), module.params)", False], # files/patch.py
+ # system/osx_defaults.py
+ ["if self.current_value is not None and not isinstance(self.current_value, type(self.value)):", True],
+ # system/osx_defaults.py
+ ['raise OSXDefaultsException("Type mismatch. Type in defaults: " + type(self.current_value).__name__)', False],
+ # network/nxos/nxos_interface.py
+ ["if get_interface_type(interface) == 'svi':", False],
+])
+def test_type_regex(cstring, cexpected): # type: (str, str) -> None
+ """Check TYPE_REGEX against various examples to verify it correctly matches or does not match."""
+ from validate_modules.main import TYPE_REGEX
+
+ match = TYPE_REGEX.match(cstring)
+
+ if cexpected:
+ assert match, f"should have matched: {cstring}"
+ else:
+ assert not match, f"should not have matched: {cstring}"
diff --git a/test/units/cli/arguments/test_optparse_helpers.py b/test/units/cli/arguments/test_optparse_helpers.py
index 082c9be..ae8e8d7 100644
--- a/test/units/cli/arguments/test_optparse_helpers.py
+++ b/test/units/cli/arguments/test_optparse_helpers.py
@@ -14,10 +14,7 @@ from ansible.cli.arguments import option_helpers as opt_help
from ansible import __path__ as ansible_path
from ansible.release import __version__ as ansible_version
-if C.DEFAULT_MODULE_PATH is None:
- cpath = u'Default w/o overrides'
-else:
- cpath = C.DEFAULT_MODULE_PATH
+cpath = C.DEFAULT_MODULE_PATH
FAKE_PROG = u'ansible-cli-test'
VERSION_OUTPUT = opt_help.version(prog=FAKE_PROG)
diff --git a/test/units/cli/galaxy/test_execute_list_collection.py b/test/units/cli/galaxy/test_execute_list_collection.py
index e8a834d..5641cb8 100644
--- a/test/units/cli/galaxy/test_execute_list_collection.py
+++ b/test/units/cli/galaxy/test_execute_list_collection.py
@@ -5,37 +5,29 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
+import pathlib
+
import pytest
+from ansible import constants as C
from ansible import context
from ansible.cli.galaxy import GalaxyCLI
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.galaxy import collection
from ansible.galaxy.dependency_resolution.dataclasses import Requirement
-from ansible.module_utils._text import to_native
-
-
-def path_exists(path):
- if to_native(path) == '/root/.ansible/collections/ansible_collections/sandwiches/ham':
- return False
- elif to_native(path) == '/usr/share/ansible/collections/ansible_collections/sandwiches/reuben':
- return False
- elif to_native(path) == 'nope':
- return False
- else:
- return True
+from ansible.module_utils.common.text.converters import to_native
+from ansible.plugins.loader import init_plugin_loader
def isdir(path):
if to_native(path) == 'nope':
return False
- else:
- return True
+ return True
def cliargs(collections_paths=None, collection_name=None):
if collections_paths is None:
- collections_paths = ['~/root/.ansible/collections', '/usr/share/ansible/collections']
+ collections_paths = ['/root/.ansible/collections', '/usr/share/ansible/collections']
context.CLIARGS._store = {
'collections_path': collections_paths,
@@ -46,95 +38,61 @@ def cliargs(collections_paths=None, collection_name=None):
@pytest.fixture
-def mock_collection_objects(mocker):
- mocker.patch('ansible.cli.galaxy.GalaxyCLI._resolve_path', side_effect=['/root/.ansible/collections', '/usr/share/ansible/collections'])
- mocker.patch('ansible.cli.galaxy.validate_collection_path',
- side_effect=['/root/.ansible/collections/ansible_collections', '/usr/share/ansible/collections/ansible_collections'])
-
- collection_args_1 = (
- (
+def mock_from_path(mocker, monkeypatch):
+ collection_args = {
+ '/usr/share/ansible/collections/ansible_collections/sandwiches/pbj': (
'sandwiches.pbj',
- '1.5.0',
- None,
+ '1.0.0',
+ '/usr/share/ansible/collections/ansible_collections/sandwiches/pbj',
'dir',
None,
),
- (
- 'sandwiches.reuben',
- '2.5.0',
- None,
+ '/usr/share/ansible/collections/ansible_collections/sandwiches/ham': (
+ 'sandwiches.ham',
+ '1.0.0',
+ '/usr/share/ansible/collections/ansible_collections/sandwiches/ham',
'dir',
None,
),
- )
-
- collection_args_2 = (
- (
+ '/root/.ansible/collections/ansible_collections/sandwiches/pbj': (
'sandwiches.pbj',
- '1.0.0',
- None,
+ '1.5.0',
+ '/root/.ansible/collections/ansible_collections/sandwiches/pbj',
'dir',
None,
),
- (
- 'sandwiches.ham',
- '1.0.0',
- None,
+ '/root/.ansible/collections/ansible_collections/sandwiches/reuben': (
+ 'sandwiches.reuben',
+ '2.5.0',
+ '/root/.ansible/collections/ansible_collections/sandwiches/reuben',
'dir',
None,
),
- )
+ }
- collections_path_1 = [Requirement(*cargs) for cargs in collection_args_1]
- collections_path_2 = [Requirement(*cargs) for cargs in collection_args_2]
+ def dispatch_requirement(path, am):
+ return Requirement(*collection_args[to_native(path)])
- mocker.patch('ansible.cli.galaxy.find_existing_collections', side_effect=[collections_path_1, collections_path_2])
+ files_mock = mocker.MagicMock()
+ mocker.patch('ansible.galaxy.collection.files', return_value=files_mock)
+ files_mock.glob.return_value = []
+ mocker.patch.object(pathlib.Path, 'is_dir', return_value=True)
+ for path, args in collection_args.items():
+ files_mock.glob.return_value.append(pathlib.Path(args[2]))
-@pytest.fixture
-def mock_from_path(mocker):
- def _from_path(collection_name='pbj'):
- collection_args = {
- 'sandwiches.pbj': (
- (
- 'sandwiches.pbj',
- '1.5.0',
- None,
- 'dir',
- None,
- ),
- (
- 'sandwiches.pbj',
- '1.0.0',
- None,
- 'dir',
- None,
- ),
- ),
- 'sandwiches.ham': (
- (
- 'sandwiches.ham',
- '1.0.0',
- None,
- 'dir',
- None,
- ),
- ),
- }
-
- from_path_objects = [Requirement(*args) for args in collection_args[collection_name]]
- mocker.patch('ansible.cli.galaxy.Requirement.from_dir_path_as_unknown', side_effect=from_path_objects)
-
- return _from_path
-
-
-def test_execute_list_collection_all(mocker, capsys, mock_collection_objects, tmp_path_factory):
+ mocker.patch('ansible.galaxy.collection.Candidate.from_dir_path_as_unknown', side_effect=dispatch_requirement)
+
+ monkeypatch.setattr(C, 'COLLECTIONS_PATHS', ['/root/.ansible/collections', '/usr/share/ansible/collections'])
+
+
+def test_execute_list_collection_all(mocker, capsys, mock_from_path, tmp_path_factory):
"""Test listing all collections from multiple paths"""
cliargs()
+ init_plugin_loader()
mocker.patch('os.path.exists', return_value=True)
- mocker.patch('os.path.isdir', return_value=True)
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list'])
tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
@@ -152,21 +110,20 @@ def test_execute_list_collection_all(mocker, capsys, mock_collection_objects, tm
assert out_lines[5] == 'sandwiches.reuben 2.5.0 '
assert out_lines[6] == ''
assert out_lines[7] == '# /usr/share/ansible/collections/ansible_collections'
- assert out_lines[8] == 'Collection Version'
- assert out_lines[9] == '-------------- -------'
- assert out_lines[10] == 'sandwiches.ham 1.0.0 '
- assert out_lines[11] == 'sandwiches.pbj 1.0.0 '
+ assert out_lines[8] == 'Collection Version'
+ assert out_lines[9] == '----------------- -------'
+ assert out_lines[10] == 'sandwiches.ham 1.0.0 '
+ assert out_lines[11] == 'sandwiches.pbj 1.0.0 '
-def test_execute_list_collection_specific(mocker, capsys, mock_collection_objects, mock_from_path, tmp_path_factory):
+def test_execute_list_collection_specific(mocker, capsys, mock_from_path, tmp_path_factory):
"""Test listing a specific collection"""
collection_name = 'sandwiches.ham'
- mock_from_path(collection_name)
cliargs(collection_name=collection_name)
- mocker.patch('os.path.exists', path_exists)
- mocker.patch('os.path.isdir', return_value=True)
+ init_plugin_loader()
+
mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name)
mocker.patch('ansible.cli.galaxy._get_collection_widths', return_value=(14, 5))
@@ -186,15 +143,14 @@ def test_execute_list_collection_specific(mocker, capsys, mock_collection_object
assert out_lines[4] == 'sandwiches.ham 1.0.0 '
-def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collection_objects, mock_from_path, tmp_path_factory):
+def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_from_path, tmp_path_factory):
"""Test listing a specific collection that exists at multiple paths"""
collection_name = 'sandwiches.pbj'
- mock_from_path(collection_name)
cliargs(collection_name=collection_name)
- mocker.patch('os.path.exists', path_exists)
- mocker.patch('os.path.isdir', return_value=True)
+ init_plugin_loader()
+
mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name)
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name])
@@ -221,6 +177,8 @@ def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collect
def test_execute_list_collection_specific_invalid_fqcn(mocker, tmp_path_factory):
"""Test an invalid fully qualified collection name (FQCN)"""
+ init_plugin_loader()
+
collection_name = 'no.good.name'
cliargs(collection_name=collection_name)
@@ -238,6 +196,7 @@ def test_execute_list_collection_no_valid_paths(mocker, capsys, tmp_path_factory
"""Test listing collections when no valid paths are given"""
cliargs()
+ init_plugin_loader()
mocker.patch('os.path.exists', return_value=True)
mocker.patch('os.path.isdir', return_value=False)
@@ -257,13 +216,14 @@ def test_execute_list_collection_no_valid_paths(mocker, capsys, tmp_path_factory
assert 'exists, but it\nis not a directory.' in err
-def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_collection_objects, tmp_path_factory):
+def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_from_path, tmp_path_factory):
"""Test listing all collections when one invalid path is given"""
- cliargs()
+ cliargs(collections_paths=['nope'])
+ init_plugin_loader()
+
mocker.patch('os.path.exists', return_value=True)
mocker.patch('os.path.isdir', isdir)
- mocker.patch('ansible.cli.galaxy.GalaxyCLI._resolve_path', side_effect=['/root/.ansible/collections', 'nope'])
mocker.patch('ansible.utils.color.ANSIBLE_COLOR', False)
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', '-p', 'nope'])
diff --git a/test/units/cli/test_adhoc.py b/test/units/cli/test_adhoc.py
index 18775f5..7bcca47 100644
--- a/test/units/cli/test_adhoc.py
+++ b/test/units/cli/test_adhoc.py
@@ -93,19 +93,15 @@ def test_run_no_extra_vars():
assert exec_info.value.code == 2
-def test_ansible_version(capsys, mocker):
+def test_ansible_version(capsys):
adhoc_cli = AdHocCLI(args=['/bin/ansible', '--version'])
with pytest.raises(SystemExit):
adhoc_cli.run()
version = capsys.readouterr()
- try:
- version_lines = version.out.splitlines()
- except AttributeError:
- # Python 2.6 does return a named tuple, so get the first item
- version_lines = version[0].splitlines()
+ version_lines = version.out.splitlines()
assert len(version_lines) == 9, 'Incorrect number of lines in "ansible --version" output'
- assert re.match(r'ansible \[core [0-9.a-z]+\]$', version_lines[0]), 'Incorrect ansible version line in "ansible --version" output'
+ assert re.match(r'ansible \[core [0-9.a-z]+\]', version_lines[0]), 'Incorrect ansible version line in "ansible --version" output'
assert re.match(' config file = .*$', version_lines[1]), 'Incorrect config file line in "ansible --version" output'
assert re.match(' configured module search path = .*$', version_lines[2]), 'Incorrect module search path in "ansible --version" output'
assert re.match(' ansible python module location = .*$', version_lines[3]), 'Incorrect python module location in "ansible --version" output'
diff --git a/test/units/cli/test_data/collection_skeleton/README.md b/test/units/cli/test_data/collection_skeleton/README.md
index 4cfd8af..2e3e4ce 100644
--- a/test/units/cli/test_data/collection_skeleton/README.md
+++ b/test/units/cli/test_data/collection_skeleton/README.md
@@ -1 +1 @@
-A readme \ No newline at end of file
+A readme
diff --git a/test/units/cli/test_data/collection_skeleton/docs/My Collection.md b/test/units/cli/test_data/collection_skeleton/docs/My Collection.md
index 6fa917f..0d6781b 100644
--- a/test/units/cli/test_data/collection_skeleton/docs/My Collection.md
+++ b/test/units/cli/test_data/collection_skeleton/docs/My Collection.md
@@ -1 +1 @@
-Welcome to my test collection doc for {{ namespace }}. \ No newline at end of file
+Welcome to my test collection doc for {{ namespace }}.
diff --git a/test/units/cli/test_doc.py b/test/units/cli/test_doc.py
index b10f088..50b714e 100644
--- a/test/units/cli/test_doc.py
+++ b/test/units/cli/test_doc.py
@@ -5,7 +5,7 @@ __metaclass__ = type
import pytest
from ansible.cli.doc import DocCLI, RoleMixin
-from ansible.plugins.loader import module_loader
+from ansible.plugins.loader import module_loader, init_plugin_loader
TTY_IFY_DATA = {
@@ -118,6 +118,7 @@ def test_builtin_modules_list():
args = ['ansible-doc', '-l', 'ansible.builtin', '-t', 'module']
obj = DocCLI(args=args)
obj.parse()
+ init_plugin_loader()
result = obj._list_plugins('module', module_loader)
assert len(result) > 0
diff --git a/test/units/cli/test_galaxy.py b/test/units/cli/test_galaxy.py
index 8ff5640..80a2dfa 100644
--- a/test/units/cli/test_galaxy.py
+++ b/test/units/cli/test_galaxy.py
@@ -20,6 +20,8 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import contextlib
+
import ansible
from io import BytesIO
import json
@@ -37,7 +39,7 @@ from ansible.cli.galaxy import GalaxyCLI
from ansible.galaxy import collection
from ansible.galaxy.api import GalaxyAPI
from ansible.errors import AnsibleError
-from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.utils import context_objects as co
from ansible.utils.display import Display
from units.compat import unittest
@@ -60,8 +62,7 @@ class TestGalaxy(unittest.TestCase):
cls.temp_dir = tempfile.mkdtemp(prefix='ansible-test_galaxy-')
os.chdir(cls.temp_dir)
- if os.path.exists("./delete_me"):
- shutil.rmtree("./delete_me")
+ shutil.rmtree("./delete_me", ignore_errors=True)
# creating framework for a role
gc = GalaxyCLI(args=["ansible-galaxy", "init", "--offline", "delete_me"])
@@ -71,8 +72,7 @@ class TestGalaxy(unittest.TestCase):
# making a temp dir for role installation
cls.role_path = os.path.join(tempfile.mkdtemp(), "roles")
- if not os.path.isdir(cls.role_path):
- os.makedirs(cls.role_path)
+ os.makedirs(cls.role_path)
# creating a tar file name for class data
cls.role_tar = './delete_me.tar.gz'
@@ -80,37 +80,29 @@ class TestGalaxy(unittest.TestCase):
# creating a temp file with installation requirements
cls.role_req = './delete_me_requirements.yml'
- fd = open(cls.role_req, "w")
- fd.write("- 'src': '%s'\n 'name': '%s'\n 'path': '%s'" % (cls.role_tar, cls.role_name, cls.role_path))
- fd.close()
+ with open(cls.role_req, "w") as fd:
+ fd.write("- 'src': '%s'\n 'name': '%s'\n 'path': '%s'" % (cls.role_tar, cls.role_name, cls.role_path))
@classmethod
def makeTar(cls, output_file, source_dir):
''' used for making a tarfile from a role directory '''
# adding directory into a tar file
- try:
- tar = tarfile.open(output_file, "w:gz")
+ with tarfile.open(output_file, "w:gz") as tar:
tar.add(source_dir, arcname=os.path.basename(source_dir))
- except AttributeError: # tarfile obj. has no attribute __exit__ prior to python 2. 7
- pass
- finally: # ensuring closure of tarfile obj
- tar.close()
@classmethod
def tearDownClass(cls):
'''After tests are finished removes things created in setUpClass'''
# deleting the temp role directory
- if os.path.exists(cls.role_dir):
- shutil.rmtree(cls.role_dir)
- if os.path.exists(cls.role_req):
+ shutil.rmtree(cls.role_dir, ignore_errors=True)
+ with contextlib.suppress(FileNotFoundError):
os.remove(cls.role_req)
- if os.path.exists(cls.role_tar):
+ with contextlib.suppress(FileNotFoundError):
os.remove(cls.role_tar)
- if os.path.isdir(cls.role_path):
- shutil.rmtree(cls.role_path)
+ shutil.rmtree(cls.role_path, ignore_errors=True)
os.chdir('/')
- shutil.rmtree(cls.temp_dir)
+ shutil.rmtree(cls.temp_dir, ignore_errors=True)
def setUp(self):
# Reset the stored command line args
@@ -137,8 +129,7 @@ class TestGalaxy(unittest.TestCase):
role_info = {'name': 'some_role_name',
'galaxy_info': galaxy_info}
display_result = gc._display_role_info(role_info)
- if display_result.find('\n\tgalaxy_info:') == -1:
- self.fail('Expected galaxy_info to be indented once')
+ self.assertNotEqual(display_result.find('\n\tgalaxy_info:'), -1, 'Expected galaxy_info to be indented once')
def test_run(self):
''' verifies that the GalaxyCLI object's api is created and that execute() is called. '''
@@ -176,7 +167,9 @@ class TestGalaxy(unittest.TestCase):
with patch.object(ansible.utils.display.Display, "display", return_value=None) as mocked_display:
# testing that error expected is raised
self.assertRaises(AnsibleError, gc.run)
- self.assertTrue(mocked_display.called_once_with("- downloading role 'fake_role_name', owned by "))
+ assert mocked_display.call_count == 2
+ assert mocked_display.mock_calls[0].args[0] == "Starting galaxy role install process"
+ assert "fake_role_name was NOT installed successfully" in mocked_display.mock_calls[1].args[0]
def test_exit_without_ignore_with_flag(self):
''' tests that GalaxyCLI exits without the error specified if the --ignore-errors flag is used '''
@@ -184,7 +177,9 @@ class TestGalaxy(unittest.TestCase):
gc = GalaxyCLI(args=["ansible-galaxy", "install", "--server=None", "fake_role_name", "--ignore-errors"])
with patch.object(ansible.utils.display.Display, "display", return_value=None) as mocked_display:
gc.run()
- self.assertTrue(mocked_display.called_once_with("- downloading role 'fake_role_name', owned by "))
+ assert mocked_display.call_count == 2
+ assert mocked_display.mock_calls[0].args[0] == "Starting galaxy role install process"
+ assert "fake_role_name was NOT installed successfully" in mocked_display.mock_calls[1].args[0]
def test_parse_no_action(self):
''' testing the options parser when no action is given '''
@@ -277,8 +272,6 @@ class ValidRoleTests(object):
# Make temp directory for testing
cls.test_dir = tempfile.mkdtemp()
- if not os.path.isdir(cls.test_dir):
- os.makedirs(cls.test_dir)
cls.role_dir = os.path.join(cls.test_dir, role_name)
cls.role_name = role_name
@@ -297,9 +290,8 @@ class ValidRoleTests(object):
cls.role_skeleton_path = gc.galaxy.default_role_skeleton_path
@classmethod
- def tearDownClass(cls):
- if os.path.isdir(cls.test_dir):
- shutil.rmtree(cls.test_dir)
+ def tearDownRole(cls):
+ shutil.rmtree(cls.test_dir, ignore_errors=True)
def test_metadata(self):
with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
@@ -349,6 +341,10 @@ class TestGalaxyInitDefault(unittest.TestCase, ValidRoleTests):
def setUpClass(cls):
cls.setUpRole(role_name='delete_me')
+ @classmethod
+ def tearDownClass(cls):
+ cls.tearDownRole()
+
def test_metadata_contents(self):
with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
metadata = yaml.safe_load(mf)
@@ -361,6 +357,10 @@ class TestGalaxyInitAPB(unittest.TestCase, ValidRoleTests):
def setUpClass(cls):
cls.setUpRole('delete_me_apb', galaxy_args=['--type=apb'])
+ @classmethod
+ def tearDownClass(cls):
+ cls.tearDownRole()
+
def test_metadata_apb_tag(self):
with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
metadata = yaml.safe_load(mf)
@@ -391,6 +391,10 @@ class TestGalaxyInitContainer(unittest.TestCase, ValidRoleTests):
def setUpClass(cls):
cls.setUpRole('delete_me_container', galaxy_args=['--type=container'])
+ @classmethod
+ def tearDownClass(cls):
+ cls.tearDownRole()
+
def test_metadata_container_tag(self):
with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
metadata = yaml.safe_load(mf)
@@ -422,6 +426,10 @@ class TestGalaxyInitSkeleton(unittest.TestCase, ValidRoleTests):
role_skeleton_path = os.path.join(os.path.split(__file__)[0], 'test_data', 'role_skeleton')
cls.setUpRole('delete_me_skeleton', skeleton_path=role_skeleton_path, use_explicit_type=True)
+ @classmethod
+ def tearDownClass(cls):
+ cls.tearDownRole()
+
def test_empty_files_dir(self):
files_dir = os.path.join(self.role_dir, 'files')
self.assertTrue(os.path.isdir(files_dir))
@@ -763,6 +771,20 @@ def test_collection_install_with_names(collection_install):
assert mock_install.call_args[0][6] is False # force_deps
+def test_collection_install_with_invalid_requirements_format(collection_install):
+ output_dir = collection_install[2]
+
+ requirements_file = os.path.join(output_dir, 'requirements.yml')
+ with open(requirements_file, 'wb') as req_obj:
+ req_obj.write(b'"invalid"')
+
+ galaxy_args = ['ansible-galaxy', 'collection', 'install', '--requirements-file', requirements_file,
+ '--collections-path', output_dir]
+
+ with pytest.raises(AnsibleError, match="Expecting requirements yaml to be a list or dictionary but got str"):
+ GalaxyCLI(args=galaxy_args).run()
+
+
def test_collection_install_with_requirements_file(collection_install):
mock_install, mock_warning, output_dir = collection_install
@@ -1242,12 +1264,7 @@ def test_install_implicit_role_with_collections(requirements_file, monkeypatch):
assert len(mock_role_install.call_args[0][0]) == 1
assert str(mock_role_install.call_args[0][0][0]) == 'namespace.name'
- found = False
- for mock_call in mock_display.mock_calls:
- if 'contains collections which will be ignored' in mock_call[1][0]:
- found = True
- break
- assert not found
+ assert not any(list('contains collections which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls))
@pytest.mark.parametrize('requirements_file', ['''
@@ -1274,12 +1291,7 @@ def test_install_explicit_role_with_collections(requirements_file, monkeypatch):
assert len(mock_role_install.call_args[0][0]) == 1
assert str(mock_role_install.call_args[0][0][0]) == 'namespace.name'
- found = False
- for mock_call in mock_display.mock_calls:
- if 'contains collections which will be ignored' in mock_call[1][0]:
- found = True
- break
- assert found
+ assert any(list('contains collections which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls))
@pytest.mark.parametrize('requirements_file', ['''
@@ -1306,12 +1318,7 @@ def test_install_role_with_collections_and_path(requirements_file, monkeypatch):
assert len(mock_role_install.call_args[0][0]) == 1
assert str(mock_role_install.call_args[0][0][0]) == 'namespace.name'
- found = False
- for mock_call in mock_display.mock_calls:
- if 'contains collections which will be ignored' in mock_call[1][0]:
- found = True
- break
- assert found
+ assert any(list('contains collections which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls))
@pytest.mark.parametrize('requirements_file', ['''
@@ -1338,9 +1345,4 @@ def test_install_collection_with_roles(requirements_file, monkeypatch):
assert mock_role_install.call_count == 0
- found = False
- for mock_call in mock_display.mock_calls:
- if 'contains roles which will be ignored' in mock_call[1][0]:
- found = True
- break
- assert found
+ assert any(list('contains roles which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls))
diff --git a/test/units/cli/test_vault.py b/test/units/cli/test_vault.py
index 2304f4d..f1399c3 100644
--- a/test/units/cli/test_vault.py
+++ b/test/units/cli/test_vault.py
@@ -29,7 +29,7 @@ from units.mock.vault_helper import TextVaultSecret
from ansible import context, errors
from ansible.cli.vault import VaultCLI
-from ansible.module_utils._text import to_text
+from ansible.module_utils.common.text.converters import to_text
from ansible.utils import context_objects as co
@@ -171,7 +171,28 @@ class TestVaultCli(unittest.TestCase):
mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
cli = VaultCLI(args=['ansible-vault', 'create', '/dev/null/foo'])
cli.parse()
+ self.assertRaisesRegex(errors.AnsibleOptionsError,
+ "not a tty, editor cannot be opened",
+ cli.run)
+
+ @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
+ @patch('ansible.cli.vault.VaultEditor')
+ def test_create_skip_tty_check(self, mock_vault_editor, mock_setup_vault_secrets):
+ mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
+ cli = VaultCLI(args=['ansible-vault', 'create', '--skip-tty-check', '/dev/null/foo'])
+ cli.parse()
+ cli.run()
+
+ @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
+ @patch('ansible.cli.vault.VaultEditor')
+ def test_create_with_tty(self, mock_vault_editor, mock_setup_vault_secrets):
+ mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
+ self.tty_stdout_patcher = patch('ansible.cli.sys.stdout.isatty', return_value=True)
+ self.tty_stdout_patcher.start()
+ cli = VaultCLI(args=['ansible-vault', 'create', '/dev/null/foo'])
+ cli.parse()
cli.run()
+ self.tty_stdout_patcher.stop()
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
@patch('ansible.cli.vault.VaultEditor')
diff --git a/test/units/compat/mock.py b/test/units/compat/mock.py
index 58dc78e..0315460 100644
--- a/test/units/compat/mock.py
+++ b/test/units/compat/mock.py
@@ -6,7 +6,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
try:
- from unittest.mock import (
+ from unittest.mock import ( # pylint: disable=unused-import
call,
patch,
mock_open,
diff --git a/test/units/config/manager/test_find_ini_config_file.py b/test/units/config/manager/test_find_ini_config_file.py
index df41138..e67eecd 100644
--- a/test/units/config/manager/test_find_ini_config_file.py
+++ b/test/units/config/manager/test_find_ini_config_file.py
@@ -13,7 +13,7 @@ import stat
import pytest
from ansible.config.manager import find_ini_config_file
-from ansible.module_utils._text import to_text
+from ansible.module_utils.common.text.converters import to_text
real_exists = os.path.exists
real_isdir = os.path.isdir
@@ -28,22 +28,17 @@ cfg_in_homedir = os.path.expanduser('~/.ansible.cfg')
@pytest.fixture
-def setup_env(request):
+def setup_env(request, monkeypatch):
cur_config = os.environ.get('ANSIBLE_CONFIG', None)
cfg_path = request.param[0]
if cfg_path is None and cur_config:
- del os.environ['ANSIBLE_CONFIG']
+ monkeypatch.delenv('ANSIBLE_CONFIG')
else:
- os.environ['ANSIBLE_CONFIG'] = request.param[0]
+ monkeypatch.setenv('ANSIBLE_CONFIG', request.param[0])
yield
- if cur_config is None and cfg_path:
- del os.environ['ANSIBLE_CONFIG']
- else:
- os.environ['ANSIBLE_CONFIG'] = cur_config
-
@pytest.fixture
def setup_existing_files(request, monkeypatch):
@@ -54,10 +49,8 @@ def setup_existing_files(request, monkeypatch):
return False
def _os_access(path, access):
- if to_text(path) in (request.param[0]):
- return True
- else:
- return False
+ assert to_text(path) in (request.param[0])
+ return True
# Enable user and system dirs so that we know cwd takes precedence
monkeypatch.setattr("os.path.exists", _os_path_exists)
@@ -162,13 +155,11 @@ class TestFindIniFile:
real_stat = os.stat
def _os_stat(path):
- if path == working_dir:
- from posix import stat_result
- stat_info = list(real_stat(path))
- stat_info[stat.ST_MODE] |= stat.S_IWOTH
- return stat_result(stat_info)
- else:
- return real_stat(path)
+ assert path == working_dir
+ from posix import stat_result
+ stat_info = list(real_stat(path))
+ stat_info[stat.ST_MODE] |= stat.S_IWOTH
+ return stat_result(stat_info)
monkeypatch.setattr('os.stat', _os_stat)
@@ -187,13 +178,11 @@ class TestFindIniFile:
real_stat = os.stat
def _os_stat(path):
- if path == working_dir:
- from posix import stat_result
- stat_info = list(real_stat(path))
- stat_info[stat.ST_MODE] |= stat.S_IWOTH
- return stat_result(stat_info)
- else:
- return real_stat(path)
+ assert path == working_dir
+ from posix import stat_result
+ stat_info = list(real_stat(path))
+ stat_info[stat.ST_MODE] |= stat.S_IWOTH
+ return stat_result(stat_info)
monkeypatch.setattr('os.stat', _os_stat)
@@ -215,14 +204,14 @@ class TestFindIniFile:
real_stat = os.stat
def _os_stat(path):
- if path == working_dir:
- from posix import stat_result
- stat_info = list(real_stat(path))
- stat_info[stat.ST_MODE] |= stat.S_IWOTH
- return stat_result(stat_info)
- else:
+ if path != working_dir:
return real_stat(path)
+ from posix import stat_result
+ stat_info = list(real_stat(path))
+ stat_info[stat.ST_MODE] |= stat.S_IWOTH
+ return stat_result(stat_info)
+
monkeypatch.setattr('os.stat', _os_stat)
warnings = set()
@@ -240,13 +229,11 @@ class TestFindIniFile:
real_stat = os.stat
def _os_stat(path):
- if path == working_dir:
- from posix import stat_result
- stat_info = list(real_stat(path))
- stat_info[stat.ST_MODE] |= stat.S_IWOTH
- return stat_result(stat_info)
- else:
- return real_stat(path)
+ assert path == working_dir
+ from posix import stat_result
+ stat_info = list(real_stat(path))
+ stat_info[stat.ST_MODE] |= stat.S_IWOTH
+ return stat_result(stat_info)
monkeypatch.setattr('os.stat', _os_stat)
diff --git a/test/units/config/test3.cfg b/test/units/config/test3.cfg
new file mode 100644
index 0000000..dab9295
--- /dev/null
+++ b/test/units/config/test3.cfg
@@ -0,0 +1,4 @@
+[colors]
+unreachable=bright red
+verbose=rgb013
+debug=gray10
diff --git a/test/units/config/test_manager.py b/test/units/config/test_manager.py
index 8ef4043..0848276 100644
--- a/test/units/config/test_manager.py
+++ b/test/units/config/test_manager.py
@@ -10,7 +10,7 @@ import os
import os.path
import pytest
-from ansible.config.manager import ConfigManager, Setting, ensure_type, resolve_path, get_config_type
+from ansible.config.manager import ConfigManager, ensure_type, resolve_path, get_config_type
from ansible.errors import AnsibleOptionsError, AnsibleError
from ansible.module_utils.six import integer_types, string_types
from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
@@ -18,6 +18,7 @@ from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
curdir = os.path.dirname(__file__)
cfg_file = os.path.join(curdir, 'test.cfg')
cfg_file2 = os.path.join(curdir, 'test2.cfg')
+cfg_file3 = os.path.join(curdir, 'test3.cfg')
ensure_test_data = [
('a,b', 'list', list),
@@ -65,6 +66,15 @@ ensure_test_data = [
('None', 'none', type(None))
]
+ensure_unquoting_test_data = [
+ ('"value"', '"value"', 'str', 'env'),
+ ('"value"', '"value"', 'str', 'yaml'),
+ ('"value"', 'value', 'str', 'ini'),
+ ('\'value\'', 'value', 'str', 'ini'),
+ ('\'\'value\'\'', '\'value\'', 'str', 'ini'),
+ ('""value""', '"value"', 'str', 'ini')
+]
+
class TestConfigManager:
@classmethod
@@ -79,6 +89,11 @@ class TestConfigManager:
def test_ensure_type(self, value, expected_type, python_type):
assert isinstance(ensure_type(value, expected_type), python_type)
+ @pytest.mark.parametrize("value, expected_value, value_type, origin", ensure_unquoting_test_data)
+ def test_ensure_type_unquoting(self, value, expected_value, value_type, origin):
+ actual_value = ensure_type(value, value_type, origin)
+ assert actual_value == expected_value
+
def test_resolve_path(self):
assert os.path.join(curdir, 'test.yml') == resolve_path('./test.yml', cfg_file)
@@ -142,3 +157,16 @@ class TestConfigManager:
actual_value = ensure_type(vault_var, value_type)
assert actual_value == "vault text"
+
+
+@pytest.mark.parametrize(("key", "expected_value"), (
+ ("COLOR_UNREACHABLE", "bright red"),
+ ("COLOR_VERBOSE", "rgb013"),
+ ("COLOR_DEBUG", "gray10")))
+def test_256color_support(key, expected_value):
+ # GIVEN: a config file containing 256-color values with default definitions
+ manager = ConfigManager(cfg_file3)
+ # WHEN: get config values
+ actual_value = manager.get_config_value(key)
+ # THEN: no error
+ assert actual_value == expected_value
diff --git a/test/units/executor/module_common/conftest.py b/test/units/executor/module_common/conftest.py
new file mode 100644
index 0000000..f0eef12
--- /dev/null
+++ b/test/units/executor/module_common/conftest.py
@@ -0,0 +1,10 @@
+import pytest
+
+
+@pytest.fixture
+def templar():
+ class FakeTemplar:
+ def template(self, template_string, *args, **kwargs):
+ return template_string
+
+ return FakeTemplar()
diff --git a/test/units/executor/module_common/test_modify_module.py b/test/units/executor/module_common/test_modify_module.py
index dceef76..89e4a16 100644
--- a/test/units/executor/module_common/test_modify_module.py
+++ b/test/units/executor/module_common/test_modify_module.py
@@ -8,9 +8,6 @@ __metaclass__ = type
import pytest
from ansible.executor.module_common import modify_module
-from ansible.module_utils.six import PY2
-
-from test_module_common import templar
FAKE_OLD_MODULE = b'''#!/usr/bin/python
@@ -22,10 +19,7 @@ print('{"result": "%s"}' % sys.executable)
@pytest.fixture
def fake_old_module_open(mocker):
m = mocker.mock_open(read_data=FAKE_OLD_MODULE)
- if PY2:
- mocker.patch('__builtin__.open', m)
- else:
- mocker.patch('builtins.open', m)
+ mocker.patch('builtins.open', m)
# this test no longer makes sense, since a Python module will always either have interpreter discovery run or
# an explicit interpreter passed (so we'll never default to the module shebang)
diff --git a/test/units/executor/module_common/test_module_common.py b/test/units/executor/module_common/test_module_common.py
index fa6add8..6e2a495 100644
--- a/test/units/executor/module_common/test_module_common.py
+++ b/test/units/executor/module_common/test_module_common.py
@@ -27,7 +27,6 @@ import ansible.errors
from ansible.executor import module_common as amc
from ansible.executor.interpreter_discovery import InterpreterDiscoveryRequiredError
-from ansible.module_utils.six import PY2
class TestStripComments:
@@ -44,15 +43,16 @@ class TestStripComments:
assert amc._strip_comments(all_comments) == u""
def test_all_whitespace(self):
- # Note: Do not remove the spaces on the blank lines below. They're
- # test data to show that the lines get removed despite having spaces
- # on them
- all_whitespace = u"""
-
-
-
-\t\t\r\n
- """ # nopep8
+ all_whitespace = (
+ '\n'
+ ' \n'
+ '\n'
+ ' \n'
+ '\t\t\r\n'
+ '\n'
+ ' '
+ )
+
assert amc._strip_comments(all_whitespace) == u""
def test_somewhat_normal(self):
@@ -80,31 +80,16 @@ class TestSlurp:
def test_slurp_file(self, mocker):
mocker.patch('os.path.exists', side_effect=lambda x: True)
m = mocker.mock_open(read_data='This is a test')
- if PY2:
- mocker.patch('__builtin__.open', m)
- else:
- mocker.patch('builtins.open', m)
+ mocker.patch('builtins.open', m)
assert amc._slurp('some_file') == 'This is a test'
def test_slurp_file_with_newlines(self, mocker):
mocker.patch('os.path.exists', side_effect=lambda x: True)
m = mocker.mock_open(read_data='#!/usr/bin/python\ndef test(args):\nprint("hi")\n')
- if PY2:
- mocker.patch('__builtin__.open', m)
- else:
- mocker.patch('builtins.open', m)
+ mocker.patch('builtins.open', m)
assert amc._slurp('some_file') == '#!/usr/bin/python\ndef test(args):\nprint("hi")\n'
-@pytest.fixture
-def templar():
- class FakeTemplar:
- def template(self, template_string, *args, **kwargs):
- return template_string
-
- return FakeTemplar()
-
-
class TestGetShebang:
"""Note: We may want to change the API of this function in the future. It isn't a great API"""
def test_no_interpreter_set(self, templar):
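
The PY2 branches above collapse into a single builtins.open patch; a self-contained sketch of that pattern with pytest-mock (illustrative test name, not from the patch):

    def test_read_is_mocked(mocker):
        m = mocker.mock_open(read_data='This is a test')
        mocker.patch('builtins.open', m)  # Python 3 only, so no __builtin__ fallback

        with open('some_file') as f:      # resolved by the mock, no real file needed
            assert f.read() == 'This is a test'
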
diff --git a/test/units/executor/module_common/test_recursive_finder.py b/test/units/executor/module_common/test_recursive_finder.py
index 8136a00..95b49d3 100644
--- a/test/units/executor/module_common/test_recursive_finder.py
+++ b/test/units/executor/module_common/test_recursive_finder.py
@@ -29,7 +29,7 @@ from io import BytesIO
import ansible.errors
from ansible.executor.module_common import recursive_finder
-
+from ansible.plugins.loader import init_plugin_loader
# These are the modules that are brought in by module_utils/basic.py This may need to be updated
# when basic.py gains new imports
@@ -42,7 +42,6 @@ MODULE_UTILS_BASIC_FILES = frozenset(('ansible/__init__.py',
'ansible/module_utils/basic.py',
'ansible/module_utils/six/__init__.py',
'ansible/module_utils/_text.py',
- 'ansible/module_utils/common/_collections_compat.py',
'ansible/module_utils/common/_json_compat.py',
'ansible/module_utils/common/collections.py',
'ansible/module_utils/common/parameters.py',
@@ -79,6 +78,8 @@ ANSIBLE_LIB = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.pa
@pytest.fixture
def finder_containers():
+ init_plugin_loader()
+
FinderContainers = namedtuple('FinderContainers', ['zf'])
zipoutput = BytesIO()
diff --git a/test/units/executor/test_interpreter_discovery.py b/test/units/executor/test_interpreter_discovery.py
index 43db595..10fc64b 100644
--- a/test/units/executor/test_interpreter_discovery.py
+++ b/test/units/executor/test_interpreter_discovery.py
@@ -9,7 +9,7 @@ __metaclass__ = type
from unittest.mock import MagicMock
from ansible.executor.interpreter_discovery import discover_interpreter
-from ansible.module_utils._text import to_text
+from ansible.module_utils.common.text.converters import to_text
mock_ubuntu_platform_res = to_text(
r'{"osrelease_content": "NAME=\"Ubuntu\"\nVERSION=\"16.04.5 LTS (Xenial Xerus)\"\nID=ubuntu\nID_LIKE=debian\n'
@@ -20,7 +20,7 @@ mock_ubuntu_platform_res = to_text(
def test_discovery_interpreter_linux_auto_legacy():
- res1 = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python\n/usr/bin/python3.5\n/usr/bin/python3\nENDFOUND'
+ res1 = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python\n/usr/bin/python3\nENDFOUND'
mock_action = MagicMock()
mock_action._low_level_execute_command.side_effect = [{'stdout': res1}, {'stdout': mock_ubuntu_platform_res}]
@@ -35,7 +35,7 @@ def test_discovery_interpreter_linux_auto_legacy():
def test_discovery_interpreter_linux_auto_legacy_silent():
- res1 = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python\n/usr/bin/python3.5\n/usr/bin/python3\nENDFOUND'
+ res1 = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python\n/usr/bin/python3\nENDFOUND'
mock_action = MagicMock()
mock_action._low_level_execute_command.side_effect = [{'stdout': res1}, {'stdout': mock_ubuntu_platform_res}]
@@ -47,7 +47,7 @@ def test_discovery_interpreter_linux_auto_legacy_silent():
def test_discovery_interpreter_linux_auto():
- res1 = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python\n/usr/bin/python3.5\n/usr/bin/python3\nENDFOUND'
+ res1 = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python\n/usr/bin/python3\nENDFOUND'
mock_action = MagicMock()
mock_action._low_level_execute_command.side_effect = [{'stdout': res1}, {'stdout': mock_ubuntu_platform_res}]
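
For reference, the fabricated stdout above mimics the discovery probe's output: a PLATFORM line followed by any interpreters found between the FOUND and ENDFOUND markers. A rough sketch of slicing that block out (illustrative only, not how discover_interpreter itself is implemented):

    res1 = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python\n/usr/bin/python3\nENDFOUND'
    found = res1.split('FOUND', 1)[1].split('ENDFOUND', 1)[0].strip().splitlines()
    assert found == ['/usr/bin/python', '/usr/bin/python3']
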
diff --git a/test/units/executor/test_play_iterator.py b/test/units/executor/test_play_iterator.py
index 6670888..0fc5975 100644
--- a/test/units/executor/test_play_iterator.py
+++ b/test/units/executor/test_play_iterator.py
@@ -25,6 +25,7 @@ from unittest.mock import patch, MagicMock
from ansible.executor.play_iterator import HostState, PlayIterator, IteratingStates, FailedStates
from ansible.playbook import Playbook
from ansible.playbook.play_context import PlayContext
+from ansible.plugins.loader import init_plugin_loader
from units.mock.loader import DictDataLoader
from units.mock.path import mock_unfrackpath_noop
@@ -85,7 +86,8 @@ class TestPlayIterator(unittest.TestCase):
always:
- name: role always task
debug: msg="always task in block in role"
- - include: foo.yml
+ - name: role include_tasks
+ include_tasks: foo.yml
- name: role task after include
debug: msg="after include in role"
- block:
@@ -170,12 +172,12 @@ class TestPlayIterator(unittest.TestCase):
self.assertIsNotNone(task)
self.assertEqual(task.name, "role always task")
self.assertIsNotNone(task._role)
- # role include task
- # (host_state, task) = itr.get_next_task_for_host(hosts[0])
- # self.assertIsNotNone(task)
- # self.assertEqual(task.action, 'debug')
- # self.assertEqual(task.name, "role included task")
- # self.assertIsNotNone(task._role)
+ # role include_tasks
+ (host_state, task) = itr.get_next_task_for_host(hosts[0])
+ self.assertIsNotNone(task)
+ self.assertEqual(task.action, 'include_tasks')
+ self.assertEqual(task.name, "role include_tasks")
+ self.assertIsNotNone(task._role)
# role task after include
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
@@ -286,6 +288,7 @@ class TestPlayIterator(unittest.TestCase):
self.assertNotIn(hosts[0], failed_hosts)
def test_play_iterator_nested_blocks(self):
+ init_plugin_loader()
fake_loader = DictDataLoader({
"test_play.yml": """
- hosts: all
@@ -427,12 +430,11 @@ class TestPlayIterator(unittest.TestCase):
)
# iterate past first task
- _, task = itr.get_next_task_for_host(hosts[0])
+ dummy, task = itr.get_next_task_for_host(hosts[0])
while (task and task.action != 'debug'):
- _, task = itr.get_next_task_for_host(hosts[0])
+ dummy, task = itr.get_next_task_for_host(hosts[0])
- if task is None:
- raise Exception("iterated past end of play while looking for place to insert tasks")
+ self.assertIsNotNone(task, 'iterated past end of play while looking for place to insert tasks')
# get the current host state and copy it so we can mutate it
s = itr.get_host_state(hosts[0])
diff --git a/test/units/executor/test_task_executor.py b/test/units/executor/test_task_executor.py
index 315d26a..66ab003 100644
--- a/test/units/executor/test_task_executor.py
+++ b/test/units/executor/test_task_executor.py
@@ -25,7 +25,7 @@ from units.compat import unittest
from unittest.mock import patch, MagicMock
from ansible.errors import AnsibleError
from ansible.executor.task_executor import TaskExecutor, remove_omit
-from ansible.plugins.loader import action_loader, lookup_loader, module_loader
+from ansible.plugins.loader import action_loader, lookup_loader
from ansible.parsing.yaml.objects import AnsibleUnicode
from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes
from ansible.module_utils.six import text_type
@@ -57,6 +57,7 @@ class TestTaskExecutor(unittest.TestCase):
loader=fake_loader,
shared_loader_obj=mock_shared_loader,
final_q=mock_queue,
+ variable_manager=MagicMock(),
)
def test_task_executor_run(self):
@@ -84,6 +85,7 @@ class TestTaskExecutor(unittest.TestCase):
loader=fake_loader,
shared_loader_obj=mock_shared_loader,
final_q=mock_queue,
+ variable_manager=MagicMock(),
)
te._get_loop_items = MagicMock(return_value=None)
@@ -102,7 +104,7 @@ class TestTaskExecutor(unittest.TestCase):
self.assertIn("failed", res)
def test_task_executor_run_clean_res(self):
- te = TaskExecutor(None, MagicMock(), None, None, None, None, None, None)
+ te = TaskExecutor(None, MagicMock(), None, None, None, None, None, None, None)
te._get_loop_items = MagicMock(return_value=[1])
te._run_loop = MagicMock(
return_value=[
@@ -150,6 +152,7 @@ class TestTaskExecutor(unittest.TestCase):
loader=fake_loader,
shared_loader_obj=mock_shared_loader,
final_q=mock_queue,
+ variable_manager=MagicMock(),
)
items = te._get_loop_items()
@@ -186,6 +189,7 @@ class TestTaskExecutor(unittest.TestCase):
loader=fake_loader,
shared_loader_obj=mock_shared_loader,
final_q=mock_queue,
+ variable_manager=MagicMock(),
)
def _execute(variables):
@@ -206,6 +210,7 @@ class TestTaskExecutor(unittest.TestCase):
loader=DictDataLoader({}),
shared_loader_obj=MagicMock(),
final_q=MagicMock(),
+ variable_manager=MagicMock(),
)
context = MagicMock(resolved=False)
@@ -214,20 +219,20 @@ class TestTaskExecutor(unittest.TestCase):
action_loader.has_plugin.return_value = True
action_loader.get.return_value = mock.sentinel.handler
- mock_connection = MagicMock()
mock_templar = MagicMock()
action = 'namespace.prefix_suffix'
te._task.action = action
+ te._connection = MagicMock()
- handler = te._get_action_handler(mock_connection, mock_templar)
+ with patch('ansible.executor.task_executor.start_connection'):
+ handler = te._get_action_handler(mock_templar)
self.assertIs(mock.sentinel.handler, handler)
- action_loader.has_plugin.assert_called_once_with(
- action, collection_list=te._task.collections)
+ action_loader.has_plugin.assert_called_once_with(action, collection_list=te._task.collections)
- action_loader.get.assert_called_once_with(
- te._task.action, task=te._task, connection=mock_connection,
+ action_loader.get.assert_called_with(
+ te._task.action, task=te._task, connection=te._connection,
play_context=te._play_context, loader=te._loader,
templar=mock_templar, shared_loader_obj=te._shared_loader_obj,
collection_list=te._task.collections)
@@ -242,6 +247,7 @@ class TestTaskExecutor(unittest.TestCase):
loader=DictDataLoader({}),
shared_loader_obj=MagicMock(),
final_q=MagicMock(),
+ variable_manager=MagicMock(),
)
context = MagicMock(resolved=False)
@@ -251,20 +257,21 @@ class TestTaskExecutor(unittest.TestCase):
action_loader.get.return_value = mock.sentinel.handler
action_loader.__contains__.return_value = True
- mock_connection = MagicMock()
mock_templar = MagicMock()
action = 'namespace.netconf_suffix'
module_prefix = action.split('_', 1)[0]
te._task.action = action
+ te._connection = MagicMock()
- handler = te._get_action_handler(mock_connection, mock_templar)
+ with patch('ansible.executor.task_executor.start_connection'):
+ handler = te._get_action_handler(mock_templar)
self.assertIs(mock.sentinel.handler, handler)
action_loader.has_plugin.assert_has_calls([mock.call(action, collection_list=te._task.collections), # called twice
mock.call(module_prefix, collection_list=te._task.collections)])
- action_loader.get.assert_called_once_with(
- module_prefix, task=te._task, connection=mock_connection,
+ action_loader.get.assert_called_with(
+ module_prefix, task=te._task, connection=te._connection,
play_context=te._play_context, loader=te._loader,
templar=mock_templar, shared_loader_obj=te._shared_loader_obj,
collection_list=te._task.collections)
@@ -279,6 +286,7 @@ class TestTaskExecutor(unittest.TestCase):
loader=DictDataLoader({}),
shared_loader_obj=MagicMock(),
final_q=MagicMock(),
+ variable_manager=MagicMock(),
)
action_loader = te._shared_loader_obj.action_loader
@@ -289,20 +297,22 @@ class TestTaskExecutor(unittest.TestCase):
context = MagicMock(resolved=False)
module_loader.find_plugin_with_context.return_value = context
- mock_connection = MagicMock()
mock_templar = MagicMock()
action = 'namespace.prefix_suffix'
module_prefix = action.split('_', 1)[0]
te._task.action = action
- handler = te._get_action_handler(mock_connection, mock_templar)
+ te._connection = MagicMock()
+
+ with patch('ansible.executor.task_executor.start_connection'):
+ handler = te._get_action_handler(mock_templar)
self.assertIs(mock.sentinel.handler, handler)
action_loader.has_plugin.assert_has_calls([mock.call(action, collection_list=te._task.collections),
mock.call(module_prefix, collection_list=te._task.collections)])
- action_loader.get.assert_called_once_with(
- 'ansible.legacy.normal', task=te._task, connection=mock_connection,
+ action_loader.get.assert_called_with(
+ 'ansible.legacy.normal', task=te._task, connection=te._connection,
play_context=te._play_context, loader=te._loader,
templar=mock_templar, shared_loader_obj=te._shared_loader_obj,
collection_list=None)
@@ -318,6 +328,7 @@ class TestTaskExecutor(unittest.TestCase):
mock_task.become = False
mock_task.retries = 0
mock_task.delay = -1
+ mock_task.delegate_to = None
mock_task.register = 'foo'
mock_task.until = None
mock_task.changed_when = None
@@ -329,6 +340,7 @@ class TestTaskExecutor(unittest.TestCase):
# other reason is that if I specify 0 here, the test fails. ;)
mock_task.async_val = 1
mock_task.poll = 0
+ mock_task.evaluate_conditional_with_result.return_value = (True, None)
mock_play_context = MagicMock()
mock_play_context.post_validate.return_value = None
@@ -343,6 +355,9 @@ class TestTaskExecutor(unittest.TestCase):
mock_action = MagicMock()
mock_queue = MagicMock()
+ mock_vm = MagicMock()
+ mock_vm.get_delegated_vars_and_hostname.return_value = {}, None
+
shared_loader = MagicMock()
new_stdin = None
job_vars = dict(omit="XXXXXXXXXXXXXXXXXXX")
@@ -356,11 +371,14 @@ class TestTaskExecutor(unittest.TestCase):
loader=fake_loader,
shared_loader_obj=shared_loader,
final_q=mock_queue,
+ variable_manager=mock_vm,
)
te._get_connection = MagicMock(return_value=mock_connection)
context = MagicMock()
- te._get_action_handler_with_context = MagicMock(return_value=get_with_context_result(mock_action, context))
+
+ with patch('ansible.executor.task_executor.start_connection'):
+ te._get_action_handler_with_context = MagicMock(return_value=get_with_context_result(mock_action, context))
mock_action.run.return_value = dict(ansible_facts=dict())
res = te._execute()
@@ -392,8 +410,6 @@ class TestTaskExecutor(unittest.TestCase):
mock_play_context = MagicMock()
- mock_connection = MagicMock()
-
mock_action = MagicMock()
mock_queue = MagicMock()
@@ -412,6 +428,7 @@ class TestTaskExecutor(unittest.TestCase):
loader=fake_loader,
shared_loader_obj=shared_loader,
final_q=mock_queue,
+ variable_manager=MagicMock(),
)
te._connection = MagicMock()
diff --git a/test/units/galaxy/test_api.py b/test/units/galaxy/test_api.py
index 064aff2..b019f1a 100644
--- a/test/units/galaxy/test_api.py
+++ b/test/units/galaxy/test_api.py
@@ -24,7 +24,7 @@ from ansible.errors import AnsibleError
from ansible.galaxy import api as galaxy_api
from ansible.galaxy.api import CollectionVersionMetadata, GalaxyAPI, GalaxyError
from ansible.galaxy.token import BasicAuthToken, GalaxyToken, KeycloakToken
-from ansible.module_utils._text import to_native, to_text
+from ansible.module_utils.common.text.converters import to_native, to_text
from ansible.module_utils.six.moves.urllib import error as urllib_error
from ansible.utils import context_objects as co
from ansible.utils.display import Display
@@ -463,10 +463,9 @@ def test_publish_failure(api_version, collection_url, response, expected, collec
def test_wait_import_task(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
- if token_ins:
- mock_token_get = MagicMock()
- mock_token_get.return_value = 'my token'
- monkeypatch.setattr(token_ins, 'get', mock_token_get)
+ mock_token_get = MagicMock()
+ mock_token_get.return_value = 'my token'
+ monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.return_value = StringIO(u'{"state":"success","finished_at":"time"}')
@@ -496,10 +495,9 @@ def test_wait_import_task(server_url, api_version, token_type, token_ins, import
def test_wait_import_task_multiple_requests(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
- if token_ins:
- mock_token_get = MagicMock()
- mock_token_get.return_value = 'my token'
- monkeypatch.setattr(token_ins, 'get', mock_token_get)
+ mock_token_get = MagicMock()
+ mock_token_get.return_value = 'my token'
+ monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.side_effect = [
@@ -543,10 +541,9 @@ def test_wait_import_task_multiple_requests(server_url, api_version, token_type,
def test_wait_import_task_with_failure(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
- if token_ins:
- mock_token_get = MagicMock()
- mock_token_get.return_value = 'my token'
- monkeypatch.setattr(token_ins, 'get', mock_token_get)
+ mock_token_get = MagicMock()
+ mock_token_get.return_value = 'my token'
+ monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.side_effect = [
@@ -620,10 +617,9 @@ def test_wait_import_task_with_failure(server_url, api_version, token_type, toke
def test_wait_import_task_with_failure_no_error(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
- if token_ins:
- mock_token_get = MagicMock()
- mock_token_get.return_value = 'my token'
- monkeypatch.setattr(token_ins, 'get', mock_token_get)
+ mock_token_get = MagicMock()
+ mock_token_get.return_value = 'my token'
+ monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.side_effect = [
@@ -693,10 +689,9 @@ def test_wait_import_task_with_failure_no_error(server_url, api_version, token_t
def test_wait_import_task_timeout(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
- if token_ins:
- mock_token_get = MagicMock()
- mock_token_get.return_value = 'my token'
- monkeypatch.setattr(token_ins, 'get', mock_token_get)
+ mock_token_get = MagicMock()
+ mock_token_get.return_value = 'my token'
+ monkeypatch.setattr(token_ins, 'get', mock_token_get)
def return_response(*args, **kwargs):
return StringIO(u'{"state":"waiting"}')
diff --git a/test/units/galaxy/test_collection.py b/test/units/galaxy/test_collection.py
index 106251c..991184a 100644
--- a/test/units/galaxy/test_collection.py
+++ b/test/units/galaxy/test_collection.py
@@ -20,10 +20,11 @@ from unittest.mock import MagicMock, mock_open, patch
import ansible.constants as C
from ansible import context
-from ansible.cli.galaxy import GalaxyCLI, SERVER_DEF
+from ansible.cli import galaxy
+from ansible.cli.galaxy import GalaxyCLI
from ansible.errors import AnsibleError
from ansible.galaxy import api, collection, token
-from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.six.moves import builtins
from ansible.utils import context_objects as co
from ansible.utils.display import Display
@@ -171,28 +172,6 @@ def manifest_info(manifest_template):
@pytest.fixture()
-def files_manifest_info():
- return {
- "files": [
- {
- "name": ".",
- "ftype": "dir",
- "chksum_type": None,
- "chksum_sha256": None,
- "format": 1
- },
- {
- "name": "README.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "individual_file_checksum",
- "format": 1
- }
- ],
- "format": 1}
-
-
-@pytest.fixture()
def manifest(manifest_info):
b_data = to_bytes(json.dumps(manifest_info))
@@ -245,23 +224,19 @@ def test_cli_options(required_signature_count, valid, monkeypatch):
{
'url': 'https://galaxy.ansible.com',
'validate_certs': 'False',
- 'v3': 'False',
},
# Expected server attributes
{
'validate_certs': False,
- '_available_api_versions': {},
},
),
(
{
'url': 'https://galaxy.ansible.com',
'validate_certs': 'True',
- 'v3': 'True',
},
{
'validate_certs': True,
- '_available_api_versions': {'v3': '/v3'},
},
),
],
@@ -279,7 +254,6 @@ def test_bool_type_server_config_options(config, server, monkeypatch):
"server_list=server1\n",
"[galaxy_server.server1]",
"url=%s" % config['url'],
- "v3=%s" % config['v3'],
"validate_certs=%s\n" % config['validate_certs'],
]
@@ -299,7 +273,6 @@ def test_bool_type_server_config_options(config, server, monkeypatch):
assert galaxy_cli.api_servers[0].name == 'server1'
assert galaxy_cli.api_servers[0].validate_certs == server['validate_certs']
- assert galaxy_cli.api_servers[0]._available_api_versions == server['_available_api_versions']
@pytest.mark.parametrize('global_ignore_certs', [True, False])
@@ -411,6 +384,55 @@ def test_validate_certs_server_config(ignore_certs_cfg, ignore_certs_cli, expect
assert galaxy_cli.api_servers[2].validate_certs is expected_server3_validate_certs
+@pytest.mark.parametrize(
+ ["timeout_cli", "timeout_cfg", "timeout_fallback", "expected_timeout"],
+ [
+ (None, None, None, 60),
+ (None, None, 10, 10),
+ (None, 20, 10, 20),
+ (30, 20, 10, 30),
+ ]
+)
+def test_timeout_server_config(timeout_cli, timeout_cfg, timeout_fallback, expected_timeout, monkeypatch):
+ cli_args = [
+ 'ansible-galaxy',
+ 'collection',
+ 'install',
+ 'namespace.collection:1.0.0',
+ ]
+ if timeout_cli is not None:
+ cli_args.extend(["--timeout", f"{timeout_cli}"])
+
+ cfg_lines = ["[galaxy]", "server_list=server1"]
+ if timeout_fallback is not None:
+ cfg_lines.append(f"server_timeout={timeout_fallback}")
+
+ # fix default in server config since C.GALAXY_SERVER_TIMEOUT was already evaluated
+ server_additional = galaxy.SERVER_ADDITIONAL.copy()
+ server_additional['timeout']['default'] = timeout_fallback
+ monkeypatch.setattr(galaxy, 'SERVER_ADDITIONAL', server_additional)
+
+ cfg_lines.extend(["[galaxy_server.server1]", "url=https://galaxy.ansible.com/api/"])
+ if timeout_cfg is not None:
+ cfg_lines.append(f"timeout={timeout_cfg}")
+
+ monkeypatch.setattr(C, 'GALAXY_SERVER_LIST', ['server1'])
+
+ with tempfile.NamedTemporaryFile(suffix='.cfg') as tmp_file:
+ tmp_file.write(to_bytes('\n'.join(cfg_lines), errors='surrogate_or_strict'))
+ tmp_file.flush()
+
+ monkeypatch.setattr(C.config, '_config_file', tmp_file.name)
+ C.config._parse_config_file()
+
+ galaxy_cli = GalaxyCLI(args=cli_args)
+ mock_execute_install = MagicMock()
+ monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install)
+ galaxy_cli.run()
+
+ assert galaxy_cli.api_servers[0].timeout == expected_timeout
+
+
def test_build_collection_no_galaxy_yaml():
fake_path = u'/fake/ÅÑŚÌβŁÈ/path'
expected = to_native("The collection galaxy.yml path '%s/galaxy.yml' does not exist." % fake_path)
@@ -479,19 +501,19 @@ def test_build_with_existing_files_and_manifest(collection_input):
with tarfile.open(output_artifact, mode='r') as actual:
members = actual.getmembers()
- manifest_file = next(m for m in members if m.path == "MANIFEST.json")
+ manifest_file = [m for m in members if m.path == "MANIFEST.json"][0]
manifest_file_obj = actual.extractfile(manifest_file.name)
manifest_file_text = manifest_file_obj.read()
manifest_file_obj.close()
assert manifest_file_text != b'{"collection_info": {"version": "6.6.6"}, "version": 1}'
- json_file = next(m for m in members if m.path == "MANIFEST.json")
+ json_file = [m for m in members if m.path == "MANIFEST.json"][0]
json_file_obj = actual.extractfile(json_file.name)
json_file_text = json_file_obj.read()
json_file_obj.close()
assert json_file_text != b'{"files": [], "format": 1}'
- sub_manifest_file = next(m for m in members if m.path == "plugins/MANIFEST.json")
+ sub_manifest_file = [m for m in members if m.path == "plugins/MANIFEST.json"][0]
sub_manifest_file_obj = actual.extractfile(sub_manifest_file.name)
sub_manifest_file_text = sub_manifest_file_obj.read()
sub_manifest_file_obj.close()
@@ -618,7 +640,7 @@ def test_build_ignore_files_and_folders(collection_input, monkeypatch):
tests_file.write('random')
tests_file.flush()
- actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel)
+ actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel, None)
assert actual['format'] == 1
for manifest_entry in actual['files']:
@@ -654,7 +676,7 @@ def test_build_ignore_older_release_in_root(collection_input, monkeypatch):
file_obj.write('random')
file_obj.flush()
- actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel)
+ actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel, None)
assert actual['format'] == 1
plugin_release_found = False
@@ -682,7 +704,7 @@ def test_build_ignore_patterns(collection_input, monkeypatch):
actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection',
['*.md', 'plugins/action', 'playbooks/*.j2'],
- Sentinel)
+ Sentinel, None)
assert actual['format'] == 1
expected_missing = [
@@ -733,7 +755,7 @@ def test_build_ignore_symlink_target_outside_collection(collection_input, monkey
link_path = os.path.join(input_dir, 'plugins', 'connection')
os.symlink(outside_dir, link_path)
- actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel)
+ actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel, None)
for manifest_entry in actual['files']:
assert manifest_entry['name'] != 'plugins/connection'
@@ -757,7 +779,7 @@ def test_build_copy_symlink_target_inside_collection(collection_input):
os.symlink(roles_target, roles_link)
- actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel)
+ actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel, None)
linked_entries = [e for e in actual['files'] if e['name'].startswith('playbooks/roles/linked')]
assert len(linked_entries) == 1
@@ -790,11 +812,11 @@ def test_build_with_symlink_inside_collection(collection_input):
with tarfile.open(output_artifact, mode='r') as actual:
members = actual.getmembers()
- linked_folder = next(m for m in members if m.path == 'playbooks/roles/linked')
+ linked_folder = [m for m in members if m.path == 'playbooks/roles/linked'][0]
assert linked_folder.type == tarfile.SYMTYPE
assert linked_folder.linkname == '../../roles/linked'
- linked_file = next(m for m in members if m.path == 'docs/README.md')
+ linked_file = [m for m in members if m.path == 'docs/README.md'][0]
assert linked_file.type == tarfile.SYMTYPE
assert linked_file.linkname == '../README.md'
@@ -802,7 +824,7 @@ def test_build_with_symlink_inside_collection(collection_input):
actual_file = secure_hash_s(linked_file_obj.read())
linked_file_obj.close()
- assert actual_file == '63444bfc766154e1bc7557ef6280de20d03fcd81'
+ assert actual_file == '08f24200b9fbe18903e7a50930c9d0df0b8d7da3' # shasum test/units/cli/test_data/collection_skeleton/README.md
def test_publish_no_wait(galaxy_server, collection_artifact, monkeypatch):
@@ -854,57 +876,6 @@ def test_publish_with_wait(galaxy_server, collection_artifact, monkeypatch):
% galaxy_server.api_server
-def test_find_existing_collections(tmp_path_factory, monkeypatch):
- test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
- concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
- collection1 = os.path.join(test_dir, 'namespace1', 'collection1')
- collection2 = os.path.join(test_dir, 'namespace2', 'collection2')
- fake_collection1 = os.path.join(test_dir, 'namespace3', 'collection3')
- fake_collection2 = os.path.join(test_dir, 'namespace4')
- os.makedirs(collection1)
- os.makedirs(collection2)
- os.makedirs(os.path.split(fake_collection1)[0])
-
- open(fake_collection1, 'wb+').close()
- open(fake_collection2, 'wb+').close()
-
- collection1_manifest = json.dumps({
- 'collection_info': {
- 'namespace': 'namespace1',
- 'name': 'collection1',
- 'version': '1.2.3',
- 'authors': ['Jordan Borean'],
- 'readme': 'README.md',
- 'dependencies': {},
- },
- 'format': 1,
- })
- with open(os.path.join(collection1, 'MANIFEST.json'), 'wb') as manifest_obj:
- manifest_obj.write(to_bytes(collection1_manifest))
-
- mock_warning = MagicMock()
- monkeypatch.setattr(Display, 'warning', mock_warning)
-
- actual = list(collection.find_existing_collections(test_dir, artifacts_manager=concrete_artifact_cm))
-
- assert len(actual) == 2
- for actual_collection in actual:
- if '%s.%s' % (actual_collection.namespace, actual_collection.name) == 'namespace1.collection1':
- assert actual_collection.namespace == 'namespace1'
- assert actual_collection.name == 'collection1'
- assert actual_collection.ver == '1.2.3'
- assert to_text(actual_collection.src) == collection1
- else:
- assert actual_collection.namespace == 'namespace2'
- assert actual_collection.name == 'collection2'
- assert actual_collection.ver == '*'
- assert to_text(actual_collection.src) == collection2
-
- assert mock_warning.call_count == 1
- assert mock_warning.mock_calls[0][1][0] == "Collection at '%s' does not have a MANIFEST.json file, nor has it galaxy.yml: " \
- "cannot detect version." % to_text(collection2)
-
-
def test_download_file(tmp_path_factory, monkeypatch):
temp_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
@@ -1111,7 +1082,7 @@ def test_verify_file_hash_deleted_file(manifest_info):
with patch.object(collection.os.path, 'isfile', MagicMock(return_value=False)) as mock_isfile:
collection._verify_file_hash(b'path/', 'file', digest, error_queue)
- assert mock_isfile.called_once
+ mock_isfile.assert_called_once()
assert len(error_queue) == 1
assert error_queue[0].installed is None
@@ -1134,7 +1105,7 @@ def test_verify_file_hash_matching_hash(manifest_info):
with patch.object(collection.os.path, 'isfile', MagicMock(return_value=True)) as mock_isfile:
collection._verify_file_hash(b'path/', 'file', digest, error_queue)
- assert mock_isfile.called_once
+ mock_isfile.assert_called_once()
assert error_queue == []
@@ -1156,7 +1127,7 @@ def test_verify_file_hash_mismatching_hash(manifest_info):
with patch.object(collection.os.path, 'isfile', MagicMock(return_value=True)) as mock_isfile:
collection._verify_file_hash(b'path/', 'file', different_digest, error_queue)
- assert mock_isfile.called_once
+ mock_isfile.assert_called_once()
assert len(error_queue) == 1
assert error_queue[0].installed == digest
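
The switch from "assert mock_isfile.called_once" to "mock_isfile.assert_called_once()" above fixes a silent no-op: accessing an arbitrary attribute on a MagicMock just creates another truthy mock, so the old assertion could never fail. A minimal demonstration:

    from unittest.mock import MagicMock

    m = MagicMock()
    assert m.called_once        # passes even though m was never called (auto-created attribute)
    try:
        m.assert_called_once()  # the real check: raises because m was not called
    except AssertionError:
        pass
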
diff --git a/test/units/galaxy/test_collection_install.py b/test/units/galaxy/test_collection_install.py
index 2118f0e..a61ae40 100644
--- a/test/units/galaxy/test_collection_install.py
+++ b/test/units/galaxy/test_collection_install.py
@@ -18,7 +18,6 @@ import yaml
from io import BytesIO, StringIO
from unittest.mock import MagicMock, patch
-from unittest import mock
import ansible.module_utils.six.moves.urllib.error as urllib_error
@@ -27,7 +26,7 @@ from ansible.cli.galaxy import GalaxyCLI
from ansible.errors import AnsibleError
from ansible.galaxy import collection, api, dependency_resolution
from ansible.galaxy.dependency_resolution.dataclasses import Candidate, Requirement
-from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.process import get_bin_path
from ansible.utils import context_objects as co
from ansible.utils.display import Display
@@ -53,78 +52,6 @@ def call_galaxy_cli(args):
co.GlobalCLIArgs._Singleton__instance = orig
-def artifact_json(namespace, name, version, dependencies, server):
- json_str = json.dumps({
- 'artifact': {
- 'filename': '%s-%s-%s.tar.gz' % (namespace, name, version),
- 'sha256': '2d76f3b8c4bab1072848107fb3914c345f71a12a1722f25c08f5d3f51f4ab5fd',
- 'size': 1234,
- },
- 'download_url': '%s/download/%s-%s-%s.tar.gz' % (server, namespace, name, version),
- 'metadata': {
- 'namespace': namespace,
- 'name': name,
- 'dependencies': dependencies,
- },
- 'version': version
- })
- return to_text(json_str)
-
-
-def artifact_versions_json(namespace, name, versions, galaxy_api, available_api_versions=None):
- results = []
- available_api_versions = available_api_versions or {}
- api_version = 'v2'
- if 'v3' in available_api_versions:
- api_version = 'v3'
- for version in versions:
- results.append({
- 'href': '%s/api/%s/%s/%s/versions/%s/' % (galaxy_api.api_server, api_version, namespace, name, version),
- 'version': version,
- })
-
- if api_version == 'v2':
- json_str = json.dumps({
- 'count': len(versions),
- 'next': None,
- 'previous': None,
- 'results': results
- })
-
- if api_version == 'v3':
- response = {'meta': {'count': len(versions)},
- 'data': results,
- 'links': {'first': None,
- 'last': None,
- 'next': None,
- 'previous': None},
- }
- json_str = json.dumps(response)
- return to_text(json_str)
-
-
-def error_json(galaxy_api, errors_to_return=None, available_api_versions=None):
- errors_to_return = errors_to_return or []
- available_api_versions = available_api_versions or {}
-
- response = {}
-
- api_version = 'v2'
- if 'v3' in available_api_versions:
- api_version = 'v3'
-
- if api_version == 'v2':
- assert len(errors_to_return) <= 1
- if errors_to_return:
- response = errors_to_return[0]
-
- if api_version == 'v3':
- response['errors'] = errors_to_return
-
- json_str = json.dumps(response)
- return to_text(json_str)
-
-
@pytest.fixture(autouse='function')
def reset_cli_args():
co.GlobalCLIArgs._Singleton__instance = None
@@ -371,6 +298,27 @@ def test_build_requirement_from_tar(collection_artifact):
assert actual.ver == u'0.1.0'
+def test_build_requirement_from_tar_url(tmp_path_factory):
+ test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
+ concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
+ test_url = 'https://example.com/org/repo/sample.tar.gz'
+ expected = fr"^Failed to download collection tar from '{to_text(test_url)}'"
+
+ with pytest.raises(AnsibleError, match=expected):
+ Requirement.from_requirement_dict({'name': test_url, 'type': 'url'}, concrete_artifact_cm)
+
+
+def test_build_requirement_from_tar_url_wrong_type(tmp_path_factory):
+ test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
+ concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
+ test_url = 'https://example.com/org/repo/sample.tar.gz'
+ expected = fr"^Unable to find collection artifact file at '{to_text(test_url)}'\.$"
+
+ with pytest.raises(AnsibleError, match=expected):
+ # Specified wrong collection type for http URL
+ Requirement.from_requirement_dict({'name': test_url, 'type': 'file'}, concrete_artifact_cm)
+
+
def test_build_requirement_from_tar_fail_not_tar(tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
test_file = os.path.join(test_dir, b'fake.tar.gz')
@@ -895,7 +843,8 @@ def test_install_collections_from_tar(collection_artifact, monkeypatch):
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
- collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False)
+ collection.install_collections(
+ requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False, set())
assert os.path.isdir(collection_path)
@@ -919,57 +868,6 @@ def test_install_collections_from_tar(collection_artifact, monkeypatch):
assert display_msgs[2] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" % to_text(collection_path)
-def test_install_collections_existing_without_force(collection_artifact, monkeypatch):
- collection_path, collection_tar = collection_artifact
- temp_path = os.path.split(collection_tar)[0]
-
- mock_display = MagicMock()
- monkeypatch.setattr(Display, 'display', mock_display)
-
- concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
-
- assert os.path.isdir(collection_path)
-
- requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
- collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False)
-
- assert os.path.isdir(collection_path)
-
- actual_files = os.listdir(collection_path)
- actual_files.sort()
- assert actual_files == [b'README.md', b'docs', b'galaxy.yml', b'playbooks', b'plugins', b'roles', b'runme.sh']
-
- # Filter out the progress cursor display calls.
- display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1]
- assert len(display_msgs) == 1
-
- assert display_msgs[0] == 'Nothing to do. All requested collections are already installed. If you want to reinstall them, consider using `--force`.'
-
- for msg in display_msgs:
- assert 'WARNING' not in msg
-
-
-def test_install_missing_metadata_warning(collection_artifact, monkeypatch):
- collection_path, collection_tar = collection_artifact
- temp_path = os.path.split(collection_tar)[0]
-
- mock_display = MagicMock()
- monkeypatch.setattr(Display, 'display', mock_display)
-
- for file in [b'MANIFEST.json', b'galaxy.yml']:
- b_path = os.path.join(collection_path, file)
- if os.path.isfile(b_path):
- os.unlink(b_path)
-
- concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
- requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
- collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False)
-
- display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1]
-
- assert 'WARNING' in display_msgs[0]
-
-
# Makes sure we don't get stuck in some recursive loop
@pytest.mark.parametrize('collection_artifact', [
{'ansible_namespace.collection': '>=0.0.1'},
@@ -984,7 +882,8 @@ def test_install_collection_with_circular_dependency(collection_artifact, monkey
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
- collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False)
+ collection.install_collections(
+ requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False, set())
assert os.path.isdir(collection_path)
@@ -1021,7 +920,8 @@ def test_install_collection_with_no_dependency(collection_artifact, monkeypatch)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
- collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False)
+ collection.install_collections(
+ requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False, set())
assert os.path.isdir(collection_path)
diff --git a/test/units/galaxy/test_role_install.py b/test/units/galaxy/test_role_install.py
index 687fcac..819ed18 100644
--- a/test/units/galaxy/test_role_install.py
+++ b/test/units/galaxy/test_role_install.py
@@ -7,6 +7,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import json
import os
import functools
import pytest
@@ -16,7 +17,7 @@ from io import StringIO
from ansible import context
from ansible.cli.galaxy import GalaxyCLI
from ansible.galaxy import api, role, Galaxy
-from ansible.module_utils._text import to_text
+from ansible.module_utils.common.text.converters import to_text
from ansible.utils import context_objects as co
@@ -24,7 +25,7 @@ def call_galaxy_cli(args):
orig = co.GlobalCLIArgs._Singleton__instance
co.GlobalCLIArgs._Singleton__instance = None
try:
- GalaxyCLI(args=['ansible-galaxy', 'role'] + args).run()
+ return GalaxyCLI(args=['ansible-galaxy', 'role'] + args).run()
finally:
co.GlobalCLIArgs._Singleton__instance = orig
@@ -120,6 +121,22 @@ def test_role_download_github_no_download_url_for_version(init_mock_temp_file, m
assert mock_role_download_api.mock_calls[0][1][0] == 'https://github.com/test_owner/test_role/archive/0.0.1.tar.gz'
+@pytest.mark.parametrize(
+ 'state,rc',
+ [('SUCCESS', 0), ('FAILED', 1)]

+)
+def test_role_import(state, rc, mocker, galaxy_server, monkeypatch):
+ responses = [
+ {"available_versions": {"v1": "v1/"}},
+ {"results": [{'id': 12345, 'github_user': 'user', 'github_repo': 'role', 'github_reference': None, 'summary_fields': {'role': {'name': 'role'}}}]},
+ {"results": [{'state': 'WAITING', 'id': 12345, 'summary_fields': {'task_messages': []}}]},
+ {"results": [{'state': state, 'id': 12345, 'summary_fields': {'task_messages': []}}]},
+ ]
+ mock_api = mocker.MagicMock(side_effect=[StringIO(json.dumps(rsp)) for rsp in responses])
+ monkeypatch.setattr(api, 'open_url', mock_api)
+ assert call_galaxy_cli(['import', 'user', 'role']) == rc
+
+
def test_role_download_url(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
diff --git a/test/units/galaxy/test_token.py b/test/units/galaxy/test_token.py
index 24af386..9fc12d4 100644
--- a/test/units/galaxy/test_token.py
+++ b/test/units/galaxy/test_token.py
@@ -13,7 +13,7 @@ from unittest.mock import MagicMock
import ansible.constants as C
from ansible.cli.galaxy import GalaxyCLI, SERVER_DEF
from ansible.galaxy.token import GalaxyToken, NoTokenSentinel
-from ansible.module_utils._text import to_bytes, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_text
@pytest.fixture()
diff --git a/test/units/inventory/test_host.py b/test/units/inventory/test_host.py
index c8f4771..712ed30 100644
--- a/test/units/inventory/test_host.py
+++ b/test/units/inventory/test_host.py
@@ -69,10 +69,10 @@ class TestHost(unittest.TestCase):
def test_equals_none(self):
other = None
- self.hostA == other
- other == self.hostA
- self.hostA != other
- other != self.hostA
+ assert not (self.hostA == other)
+ assert not (other == self.hostA)
+ assert self.hostA != other
+ assert other != self.hostA
self.assertNotEqual(self.hostA, other)
def test_serialize(self):
diff --git a/test/units/mock/loader.py b/test/units/mock/loader.py
index f6ceb37..9dc32ca 100644
--- a/test/units/mock/loader.py
+++ b/test/units/mock/loader.py
@@ -21,16 +21,15 @@ __metaclass__ = type
import os
-from ansible.errors import AnsibleParserError
from ansible.parsing.dataloader import DataLoader
-from ansible.module_utils._text import to_bytes, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_text
class DictDataLoader(DataLoader):
def __init__(self, file_mapping=None):
file_mapping = {} if file_mapping is None else file_mapping
- assert type(file_mapping) == dict
+ assert isinstance(file_mapping, dict)
super(DictDataLoader, self).__init__()
@@ -48,11 +47,7 @@ class DictDataLoader(DataLoader):
# TODO: the real _get_file_contents returns a bytestring, so we actually convert the
# unicode/text it's created with to utf-8
def _get_file_contents(self, file_name):
- path = to_text(file_name)
- if path in self._file_mapping:
- return to_bytes(self._file_mapping[file_name]), False
- else:
- raise AnsibleParserError("file not found: %s" % file_name)
+ return to_bytes(self._file_mapping[file_name]), False
def path_exists(self, path):
path = to_text(path)
@@ -91,25 +86,6 @@ class DictDataLoader(DataLoader):
self._add_known_directory(dirname)
dirname = os.path.dirname(dirname)
- def push(self, path, content):
- rebuild_dirs = False
- if path not in self._file_mapping:
- rebuild_dirs = True
-
- self._file_mapping[path] = content
-
- if rebuild_dirs:
- self._build_known_directories()
-
- def pop(self, path):
- if path in self._file_mapping:
- del self._file_mapping[path]
- self._build_known_directories()
-
- def clear(self):
- self._file_mapping = dict()
- self._known_directories = []
-
def get_basedir(self):
return os.getcwd()
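
A minimal sketch (not part of the patch) of how tests use this loader; note that with the simplified _get_file_contents above, asking for an unmapped path now surfaces a plain KeyError rather than an AnsibleParserError:

    from units.mock.loader import DictDataLoader

    # Hypothetical mapping for illustration: in-memory paths to file contents.
    fake_loader = DictDataLoader({"/play/tasks/main.yml": "- debug: msg=hello"})
    assert fake_loader.path_exists("/play/tasks/main.yml")

    content, show_content = fake_loader._get_file_contents("/play/tasks/main.yml")
    assert content == b"- debug: msg=hello"
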
diff --git a/test/units/mock/procenv.py b/test/units/mock/procenv.py
index 271a207..1570c87 100644
--- a/test/units/mock/procenv.py
+++ b/test/units/mock/procenv.py
@@ -27,7 +27,7 @@ from contextlib import contextmanager
from io import BytesIO, StringIO
from units.compat import unittest
from ansible.module_utils.six import PY3
-from ansible.module_utils._text import to_bytes
+from ansible.module_utils.common.text.converters import to_bytes
@contextmanager
@@ -54,30 +54,9 @@ def swap_stdin_and_argv(stdin_data='', argv_data=tuple()):
sys.argv = real_argv
-@contextmanager
-def swap_stdout():
- """
- context manager that temporarily replaces stdout for tests that need to verify output
- """
- old_stdout = sys.stdout
-
- if PY3:
- fake_stream = StringIO()
- else:
- fake_stream = BytesIO()
-
- try:
- sys.stdout = fake_stream
-
- yield fake_stream
- finally:
- sys.stdout = old_stdout
-
-
class ModuleTestCase(unittest.TestCase):
- def setUp(self, module_args=None):
- if module_args is None:
- module_args = {'_ansible_remote_tmp': '/tmp', '_ansible_keep_remote_files': False}
+ def setUp(self):
+ module_args = {'_ansible_remote_tmp': '/tmp', '_ansible_keep_remote_files': False}
args = json.dumps(dict(ANSIBLE_MODULE_ARGS=module_args))
diff --git a/test/units/mock/vault_helper.py b/test/units/mock/vault_helper.py
index dcce9c7..5b2fdd2 100644
--- a/test/units/mock/vault_helper.py
+++ b/test/units/mock/vault_helper.py
@@ -15,7 +15,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ansible.module_utils._text import to_bytes
+from ansible.module_utils.common.text.converters import to_bytes
from ansible.parsing.vault import VaultSecret
diff --git a/test/units/mock/yaml_helper.py b/test/units/mock/yaml_helper.py
index 1ef1721..9f8b063 100644
--- a/test/units/mock/yaml_helper.py
+++ b/test/units/mock/yaml_helper.py
@@ -4,8 +4,6 @@ __metaclass__ = type
import io
import yaml
-from ansible.module_utils.six import PY3
-from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing.yaml.dumper import AnsibleDumper
@@ -15,21 +13,14 @@ class YamlTestUtils(object):
"""Vault related tests will want to override this.
Vault cases should setup a AnsibleLoader that has the vault password."""
- return AnsibleLoader(stream)
def _dump_stream(self, obj, stream, dumper=None):
"""Dump to a py2-unicode or py3-string stream."""
- if PY3:
- return yaml.dump(obj, stream, Dumper=dumper)
- else:
- return yaml.dump(obj, stream, Dumper=dumper, encoding=None)
+ return yaml.dump(obj, stream, Dumper=dumper)
def _dump_string(self, obj, dumper=None):
"""Dump to a py2-unicode or py3-string"""
- if PY3:
- return yaml.dump(obj, Dumper=dumper)
- else:
- return yaml.dump(obj, Dumper=dumper, encoding=None)
+ return yaml.dump(obj, Dumper=dumper)
def _dump_load_cycle(self, obj):
# Each pass though a dump or load revs the 'generation'
@@ -62,63 +53,3 @@ class YamlTestUtils(object):
# should be transitive, but...
self.assertEqual(obj_2, obj_3)
self.assertEqual(string_from_object_dump, string_from_object_dump_3)
-
- def _old_dump_load_cycle(self, obj):
- '''Dump the passed in object to yaml, load it back up, dump again, compare.'''
- stream = io.StringIO()
-
- yaml_string = self._dump_string(obj, dumper=AnsibleDumper)
- self._dump_stream(obj, stream, dumper=AnsibleDumper)
-
- yaml_string_from_stream = stream.getvalue()
-
- # reset stream
- stream.seek(0)
-
- loader = self._loader(stream)
- # loader = AnsibleLoader(stream, vault_password=self.vault_password)
- obj_from_stream = loader.get_data()
-
- stream_from_string = io.StringIO(yaml_string)
- loader2 = self._loader(stream_from_string)
- # loader2 = AnsibleLoader(stream_from_string, vault_password=self.vault_password)
- obj_from_string = loader2.get_data()
-
- stream_obj_from_stream = io.StringIO()
- stream_obj_from_string = io.StringIO()
-
- if PY3:
- yaml.dump(obj_from_stream, stream_obj_from_stream, Dumper=AnsibleDumper)
- yaml.dump(obj_from_stream, stream_obj_from_string, Dumper=AnsibleDumper)
- else:
- yaml.dump(obj_from_stream, stream_obj_from_stream, Dumper=AnsibleDumper, encoding=None)
- yaml.dump(obj_from_stream, stream_obj_from_string, Dumper=AnsibleDumper, encoding=None)
-
- yaml_string_stream_obj_from_stream = stream_obj_from_stream.getvalue()
- yaml_string_stream_obj_from_string = stream_obj_from_string.getvalue()
-
- stream_obj_from_stream.seek(0)
- stream_obj_from_string.seek(0)
-
- if PY3:
- yaml_string_obj_from_stream = yaml.dump(obj_from_stream, Dumper=AnsibleDumper)
- yaml_string_obj_from_string = yaml.dump(obj_from_string, Dumper=AnsibleDumper)
- else:
- yaml_string_obj_from_stream = yaml.dump(obj_from_stream, Dumper=AnsibleDumper, encoding=None)
- yaml_string_obj_from_string = yaml.dump(obj_from_string, Dumper=AnsibleDumper, encoding=None)
-
- assert yaml_string == yaml_string_obj_from_stream
- assert yaml_string == yaml_string_obj_from_stream == yaml_string_obj_from_string
- assert (yaml_string == yaml_string_obj_from_stream == yaml_string_obj_from_string == yaml_string_stream_obj_from_stream ==
- yaml_string_stream_obj_from_string)
- assert obj == obj_from_stream
- assert obj == obj_from_string
- assert obj == yaml_string_obj_from_stream
- assert obj == yaml_string_obj_from_string
- assert obj == obj_from_stream == obj_from_string == yaml_string_obj_from_stream == yaml_string_obj_from_string
- return {'obj': obj,
- 'yaml_string': yaml_string,
- 'yaml_string_from_stream': yaml_string_from_stream,
- 'obj_from_stream': obj_from_stream,
- 'obj_from_string': obj_from_string,
- 'yaml_string_obj_from_string': yaml_string_obj_from_string}
diff --git a/test/units/module_utils/basic/test__symbolic_mode_to_octal.py b/test/units/module_utils/basic/test__symbolic_mode_to_octal.py
index 7793b34..b3a73e5 100644
--- a/test/units/module_utils/basic/test__symbolic_mode_to_octal.py
+++ b/test/units/module_utils/basic/test__symbolic_mode_to_octal.py
@@ -63,6 +63,14 @@ DATA = ( # Going from no permissions to setting all for user, group, and/or oth
# Multiple permissions
(0o040000, u'u=rw-x+X,g=r-x+X,o=r-x+X', 0o0755),
(0o100000, u'u=rw-x+X,g=r-x+X,o=r-x+X', 0o0644),
+ (0o040000, u'ug=rx,o=', 0o0550),
+ (0o100000, u'ug=rx,o=', 0o0550),
+ (0o040000, u'u=rx,g=r', 0o0540),
+ (0o100000, u'u=rx,g=r', 0o0540),
+ (0o040777, u'ug=rx,o=', 0o0550),
+ (0o100777, u'ug=rx,o=', 0o0550),
+ (0o040777, u'u=rx,g=r', 0o0547),
+ (0o100777, u'u=rx,g=r', 0o0547),
)
UMASK_DATA = (
diff --git a/test/units/module_utils/basic/test_argument_spec.py b/test/units/module_utils/basic/test_argument_spec.py
index 211d65a..5dbaf50 100644
--- a/test/units/module_utils/basic/test_argument_spec.py
+++ b/test/units/module_utils/basic/test_argument_spec.py
@@ -453,7 +453,7 @@ class TestComplexOptions:
'bar1': None, 'bar2': None, 'bar3': None, 'bar4': None}]
),
# Check for elements in sub-options
- ({"foobar": [{"foo": "good", "bam": "required_one_of", "bar1": [1, "good", "yes"], "bar2": ['1', 1], "bar3":['1.3', 1.3, 1]}]},
+ ({"foobar": [{"foo": "good", "bam": "required_one_of", "bar1": [1, "good", "yes"], "bar2": ['1', 1], "bar3": ['1.3', 1.3, 1]}]},
[{'foo': 'good', 'bam1': None, 'bam2': 'test', 'bam3': None, 'bam4': None, 'bar': None, 'baz': None, 'bam': 'required_one_of',
'bar1': ["1", "good", "yes"], 'bar2': [1, 1], 'bar3': [1.3, 1.3, 1.0], 'bar4': None}]
),
diff --git a/test/units/module_utils/basic/test_command_nonexisting.py b/test/units/module_utils/basic/test_command_nonexisting.py
index 6ed7f91..0dd3bd9 100644
--- a/test/units/module_utils/basic/test_command_nonexisting.py
+++ b/test/units/module_utils/basic/test_command_nonexisting.py
@@ -1,14 +1,11 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import sys
-import pytest
import json
import sys
import pytest
import subprocess
-import ansible.module_utils.basic
-from ansible.module_utils._text import to_bytes
+from ansible.module_utils.common.text.converters import to_bytes
from ansible.module_utils import basic
diff --git a/test/units/module_utils/basic/test_filesystem.py b/test/units/module_utils/basic/test_filesystem.py
index f09cecf..50e674c 100644
--- a/test/units/module_utils/basic/test_filesystem.py
+++ b/test/units/module_utils/basic/test_filesystem.py
@@ -143,6 +143,8 @@ class TestOtherFilesystem(ModuleTestCase):
argument_spec=dict(),
)
+ am.selinux_enabled = lambda: False
+
file_args = {
'path': '/path/to/file',
'mode': None,
diff --git a/test/units/module_utils/basic/test_get_available_hash_algorithms.py b/test/units/module_utils/basic/test_get_available_hash_algorithms.py
new file mode 100644
index 0000000..d60f34c
--- /dev/null
+++ b/test/units/module_utils/basic/test_get_available_hash_algorithms.py
@@ -0,0 +1,60 @@
+"""Unit tests to provide coverage not easily obtained from integration tests."""
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import hashlib
+import sys
+
+import pytest
+
+from ansible.module_utils.basic import _get_available_hash_algorithms
+
+
+@pytest.mark.skipif(sys.version_info < (2, 7, 9), reason="requires Python 2.7.9 or later")
+def test_unavailable_algorithm(mocker):
+ """Simulate an available algorithm that isn't."""
+ expected_algorithms = {'sha256', 'sha512'} # guaranteed to be available
+
+ mocker.patch('hashlib.algorithms_available', expected_algorithms | {'not_actually_available'})
+
+ available_algorithms = _get_available_hash_algorithms()
+
+ assert sorted(expected_algorithms) == sorted(available_algorithms)
+
+
+@pytest.mark.skipif(sys.version_info < (2, 7, 9), reason="requires Python 2.7.9 or later")
+def test_fips_mode(mocker):
+ """Simulate running in FIPS mode on Python 2.7.9 or later."""
+ expected_algorithms = {'sha256', 'sha512'} # guaranteed to be available
+
+ mocker.patch('hashlib.algorithms_available', expected_algorithms | {'md5'})
+ mocker.patch('hashlib.md5').side_effect = ValueError() # using md5 in FIPS mode raises a ValueError
+
+ available_algorithms = _get_available_hash_algorithms()
+
+ assert sorted(expected_algorithms) == sorted(available_algorithms)
+
+
+@pytest.mark.skipif(sys.version_info < (2, 7, 9) or sys.version_info[:2] != (2, 7), reason="requires Python 2.7 (2.7.9 or later)")
+def test_legacy_python(mocker):
+ """Simulate behavior on Python 2.7.x earlier than Python 2.7.9."""
+ expected_algorithms = {'sha256', 'sha512'} # guaranteed to be available
+
+ # This attribute is exclusive to Python 2.7.
+ # Since `hashlib.algorithms_available` is used on Python 2.7.9 and later, only Python 2.7.0 through 2.7.8 utilize this attribute.
+ mocker.patch('hashlib.algorithms', expected_algorithms)
+
+ saved_algorithms = hashlib.algorithms_available
+
+ # Make sure that this attribute is unavailable, to simulate running on Python 2.7.0 through 2.7.8.
+ # It will be restored immediately after performing the test.
+ del hashlib.algorithms_available
+
+ try:
+ available_algorithms = _get_available_hash_algorithms()
+ finally:
+ hashlib.algorithms_available = saved_algorithms
+
+ assert sorted(expected_algorithms) == sorted(available_algorithms)
diff --git a/test/units/module_utils/basic/test_run_command.py b/test/units/module_utils/basic/test_run_command.py
index 04211e2..259ac6c 100644
--- a/test/units/module_utils/basic/test_run_command.py
+++ b/test/units/module_utils/basic/test_run_command.py
@@ -12,7 +12,7 @@ from io import BytesIO
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.six import PY2
from ansible.module_utils.compat import selectors
@@ -109,7 +109,7 @@ def mock_subprocess(mocker):
super(MockSelector, self).close()
self._file_objs = []
- selectors.DefaultSelector = MockSelector
+ selectors.PollSelector = MockSelector
subprocess = mocker.patch('ansible.module_utils.basic.subprocess')
subprocess._output = {mocker.sentinel.stdout: SpecialBytesIO(b'', fh=mocker.sentinel.stdout),
@@ -194,7 +194,7 @@ class TestRunCommandPrompt:
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
def test_prompt_no_match(self, mocker, rc_am):
rc_am._os._cmd_out[mocker.sentinel.stdout] = BytesIO(b'hello')
- (rc, _, _) = rc_am.run_command('foo', prompt_regex='[pP]assword:')
+ (rc, stdout, stderr) = rc_am.run_command('foo', prompt_regex='[pP]assword:')
assert rc == 0
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
@@ -204,7 +204,7 @@ class TestRunCommandPrompt:
fh=mocker.sentinel.stdout),
mocker.sentinel.stderr:
SpecialBytesIO(b'', fh=mocker.sentinel.stderr)}
- (rc, _, _) = rc_am.run_command('foo', prompt_regex=r'[pP]assword:', data=None)
+ (rc, stdout, stderr) = rc_am.run_command('foo', prompt_regex=r'[pP]assword:', data=None)
assert rc == 257
@@ -212,7 +212,7 @@ class TestRunCommandRc:
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
def test_check_rc_false(self, rc_am):
rc_am._subprocess.Popen.return_value.returncode = 1
- (rc, _, _) = rc_am.run_command('/bin/false', check_rc=False)
+ (rc, stdout, stderr) = rc_am.run_command('/bin/false', check_rc=False)
assert rc == 1
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
diff --git a/test/units/module_utils/basic/test_safe_eval.py b/test/units/module_utils/basic/test_safe_eval.py
index e8538ca..fdaab18 100644
--- a/test/units/module_utils/basic/test_safe_eval.py
+++ b/test/units/module_utils/basic/test_safe_eval.py
@@ -67,4 +67,4 @@ def test_invalid_strings_with_exceptions(am, code, expected, exception):
if exception is None:
assert res[1] == exception
else:
- assert type(res[1]) == exception
+ assert isinstance(res[1], exception)
diff --git a/test/units/module_utils/basic/test_sanitize_keys.py b/test/units/module_utils/basic/test_sanitize_keys.py
index 180f866..3edb216 100644
--- a/test/units/module_utils/basic/test_sanitize_keys.py
+++ b/test/units/module_utils/basic/test_sanitize_keys.py
@@ -6,7 +6,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import pytest
from ansible.module_utils.basic import sanitize_keys
diff --git a/test/units/module_utils/basic/test_selinux.py b/test/units/module_utils/basic/test_selinux.py
index d855768..bdb6b9d 100644
--- a/test/units/module_utils/basic/test_selinux.py
+++ b/test/units/module_utils/basic/test_selinux.py
@@ -43,16 +43,21 @@ class TestSELinuxMU:
with patch.object(basic, 'HAVE_SELINUX', False):
assert no_args_module().selinux_enabled() is False
- # test selinux present/not-enabled
- disabled_mod = no_args_module()
- with patch('ansible.module_utils.compat.selinux.is_selinux_enabled', return_value=0):
- assert disabled_mod.selinux_enabled() is False
+ # test selinux present/not-enabled
+ disabled_mod = no_args_module()
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ selinux.is_selinux_enabled.return_value = 0
+ assert disabled_mod.selinux_enabled() is False
+
# ensure value is cached (same answer after unpatching)
assert disabled_mod.selinux_enabled() is False
+
# and present / enabled
- enabled_mod = no_args_module()
- with patch('ansible.module_utils.compat.selinux.is_selinux_enabled', return_value=1):
- assert enabled_mod.selinux_enabled() is True
+ with patch.object(basic, 'HAVE_SELINUX', True):
+ enabled_mod = no_args_module()
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ selinux.is_selinux_enabled.return_value = 1
+ assert enabled_mod.selinux_enabled() is True
# ensure value is cached (same answer after unpatching)
assert enabled_mod.selinux_enabled() is True
@@ -60,12 +65,16 @@ class TestSELinuxMU:
# selinux unavailable, should return false
with patch.object(basic, 'HAVE_SELINUX', False):
assert no_args_module().selinux_mls_enabled() is False
- # selinux disabled, should return false
- with patch('ansible.module_utils.compat.selinux.is_selinux_mls_enabled', return_value=0):
- assert no_args_module(selinux_enabled=False).selinux_mls_enabled() is False
- # selinux enabled, should pass through the value of is_selinux_mls_enabled
- with patch('ansible.module_utils.compat.selinux.is_selinux_mls_enabled', return_value=1):
- assert no_args_module(selinux_enabled=True).selinux_mls_enabled() is True
+ # selinux disabled, should return false
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ selinux.is_selinux_mls_enabled.return_value = 0
+ assert no_args_module(selinux_enabled=False).selinux_mls_enabled() is False
+
+ with patch.object(basic, 'HAVE_SELINUX', True):
+ # selinux enabled, should pass through the value of is_selinux_mls_enabled
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ selinux.is_selinux_mls_enabled.return_value = 1
+ assert no_args_module(selinux_enabled=True).selinux_mls_enabled() is True
def test_selinux_initial_context(self):
# selinux missing/disabled/enabled sans MLS is 3-element None
@@ -80,16 +89,19 @@ class TestSELinuxMU:
assert no_args_module().selinux_default_context(path='/foo/bar') == [None, None, None]
am = no_args_module(selinux_enabled=True, selinux_mls_enabled=True)
- # matchpathcon success
- with patch('ansible.module_utils.compat.selinux.matchpathcon', return_value=[0, 'unconfined_u:object_r:default_t:s0']):
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ # matchpathcon success
+ selinux.matchpathcon.return_value = [0, 'unconfined_u:object_r:default_t:s0']
assert am.selinux_default_context(path='/foo/bar') == ['unconfined_u', 'object_r', 'default_t', 's0']
- # matchpathcon fail (return initial context value)
- with patch('ansible.module_utils.compat.selinux.matchpathcon', return_value=[-1, '']):
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ # matchpathcon fail (return initial context value)
+ selinux.matchpathcon.return_value = [-1, '']
assert am.selinux_default_context(path='/foo/bar') == [None, None, None, None]
- # matchpathcon OSError
- with patch('ansible.module_utils.compat.selinux.matchpathcon', side_effect=OSError):
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ # matchpathcon OSError
+ selinux.matchpathcon.side_effect = OSError
assert am.selinux_default_context(path='/foo/bar') == [None, None, None, None]
def test_selinux_context(self):
@@ -99,19 +111,23 @@ class TestSELinuxMU:
am = no_args_module(selinux_enabled=True, selinux_mls_enabled=True)
# lgetfilecon_raw passthru
- with patch('ansible.module_utils.compat.selinux.lgetfilecon_raw', return_value=[0, 'unconfined_u:object_r:default_t:s0']):
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ selinux.lgetfilecon_raw.return_value = [0, 'unconfined_u:object_r:default_t:s0']
assert am.selinux_context(path='/foo/bar') == ['unconfined_u', 'object_r', 'default_t', 's0']
# lgetfilecon_raw returned a failure
- with patch('ansible.module_utils.compat.selinux.lgetfilecon_raw', return_value=[-1, '']):
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ selinux.lgetfilecon_raw.return_value = [-1, '']
assert am.selinux_context(path='/foo/bar') == [None, None, None, None]
# lgetfilecon_raw OSError (should bomb the module)
- with patch('ansible.module_utils.compat.selinux.lgetfilecon_raw', side_effect=OSError(errno.ENOENT, 'NotFound')):
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ selinux.lgetfilecon_raw.side_effect = OSError(errno.ENOENT, 'NotFound')
with pytest.raises(SystemExit):
am.selinux_context(path='/foo/bar')
- with patch('ansible.module_utils.compat.selinux.lgetfilecon_raw', side_effect=OSError()):
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ selinux.lgetfilecon_raw.side_effect = OSError()
with pytest.raises(SystemExit):
am.selinux_context(path='/foo/bar')
@@ -166,25 +182,29 @@ class TestSELinuxMU:
am.selinux_context = lambda path: ['bar_u', 'bar_r', None, None]
am.is_special_selinux_path = lambda path: (False, None)
- with patch('ansible.module_utils.compat.selinux.lsetfilecon', return_value=0) as m:
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ selinux.lsetfilecon.return_value = 0
assert am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False) is True
- m.assert_called_with('/path/to/file', 'foo_u:foo_r:foo_t:s0')
- m.reset_mock()
+ selinux.lsetfilecon.assert_called_with('/path/to/file', 'foo_u:foo_r:foo_t:s0')
+ selinux.lsetfilecon.reset_mock()
am.check_mode = True
assert am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False) is True
- assert not m.called
+ assert not selinux.lsetfilecon.called
am.check_mode = False
- with patch('ansible.module_utils.compat.selinux.lsetfilecon', return_value=1):
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ selinux.lsetfilecon.return_value = 1
with pytest.raises(SystemExit):
am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], True)
- with patch('ansible.module_utils.compat.selinux.lsetfilecon', side_effect=OSError):
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ selinux.lsetfilecon.side_effect = OSError
with pytest.raises(SystemExit):
am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], True)
am.is_special_selinux_path = lambda path: (True, ['sp_u', 'sp_r', 'sp_t', 's0'])
- with patch('ansible.module_utils.compat.selinux.lsetfilecon', return_value=0) as m:
+ with patch.object(basic, 'selinux', create=True) as selinux:
+ selinux.lsetfilecon.return_value = 0
assert am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False) is True
- m.assert_called_with('/path/to/file', 'sp_u:sp_r:sp_t:s0')
+ selinux.lsetfilecon.assert_called_with('/path/to/file', 'sp_u:sp_r:sp_t:s0')
diff --git a/test/units/module_utils/basic/test_set_cwd.py b/test/units/module_utils/basic/test_set_cwd.py
index 159236b..c094c62 100644
--- a/test/units/module_utils/basic/test_set_cwd.py
+++ b/test/units/module_utils/basic/test_set_cwd.py
@@ -8,13 +8,10 @@ __metaclass__ = type
import json
import os
-import shutil
import tempfile
-import pytest
-
-from units.compat.mock import patch, MagicMock
-from ansible.module_utils._text import to_bytes
+from units.compat.mock import patch
+from ansible.module_utils.common.text.converters import to_bytes
from ansible.module_utils import basic
diff --git a/test/units/module_utils/basic/test_tmpdir.py b/test/units/module_utils/basic/test_tmpdir.py
index 818cb9b..ec12508 100644
--- a/test/units/module_utils/basic/test_tmpdir.py
+++ b/test/units/module_utils/basic/test_tmpdir.py
@@ -14,7 +14,7 @@ import tempfile
import pytest
from units.compat.mock import patch, MagicMock
-from ansible.module_utils._text import to_bytes
+from ansible.module_utils.common.text.converters import to_bytes
from ansible.module_utils import basic
diff --git a/test/units/module_utils/common/arg_spec/test_aliases.py b/test/units/module_utils/common/arg_spec/test_aliases.py
index 7d30fb0..7522c76 100644
--- a/test/units/module_utils/common/arg_spec/test_aliases.py
+++ b/test/units/module_utils/common/arg_spec/test_aliases.py
@@ -9,7 +9,6 @@ import pytest
from ansible.module_utils.errors import AnsibleValidationError, AnsibleValidationErrorMultiple
from ansible.module_utils.common.arg_spec import ArgumentSpecValidator, ValidationResult
-from ansible.module_utils.common.warnings import get_deprecation_messages, get_warning_messages
# id, argument spec, parameters, expected parameters, deprecation, warning
ALIAS_TEST_CASES = [
diff --git a/test/units/module_utils/common/parameters/test_handle_aliases.py b/test/units/module_utils/common/parameters/test_handle_aliases.py
index e20a888..6a8c2b2 100644
--- a/test/units/module_utils/common/parameters/test_handle_aliases.py
+++ b/test/units/module_utils/common/parameters/test_handle_aliases.py
@@ -9,7 +9,7 @@ __metaclass__ = type
import pytest
from ansible.module_utils.common.parameters import _handle_aliases
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
def test_handle_aliases_no_aliases():
diff --git a/test/units/module_utils/common/parameters/test_list_deprecations.py b/test/units/module_utils/common/parameters/test_list_deprecations.py
index 6f0bb71..d667a2f 100644
--- a/test/units/module_utils/common/parameters/test_list_deprecations.py
+++ b/test/units/module_utils/common/parameters/test_list_deprecations.py
@@ -5,21 +5,10 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-import pytest
from ansible.module_utils.common.parameters import _list_deprecations
-@pytest.fixture
-def params():
- return {
- 'name': 'bob',
- 'dest': '/etc/hosts',
- 'state': 'present',
- 'value': 5,
- }
-
-
def test_list_deprecations():
argument_spec = {
'old': {'type': 'str', 'removed_in_version': '2.5'},
diff --git a/test/units/module_utils/common/test_collections.py b/test/units/module_utils/common/test_collections.py
index 95b2a40..8424502 100644
--- a/test/units/module_utils/common/test_collections.py
+++ b/test/units/module_utils/common/test_collections.py
@@ -8,8 +8,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.six import Iterator
-from ansible.module_utils.common._collections_compat import Sequence
+from ansible.module_utils.six.moves.collections_abc import Sequence
from ansible.module_utils.common.collections import ImmutableDict, is_iterable, is_sequence
@@ -25,16 +24,6 @@ class SeqStub:
Sequence.register(SeqStub)
-class IteratorStub(Iterator):
- def __next__(self):
- raise StopIteration
-
-
-class IterableStub:
- def __iter__(self):
- return IteratorStub()
-
-
class FakeAnsibleVaultEncryptedUnicode(Sequence):
__ENCRYPTED__ = True
@@ -42,10 +31,10 @@ class FakeAnsibleVaultEncryptedUnicode(Sequence):
self.data = data
def __getitem__(self, index):
- return self.data[index]
+ raise NotImplementedError() # pragma: nocover
def __len__(self):
- return len(self.data)
+ raise NotImplementedError() # pragma: nocover
TEST_STRINGS = u'he', u'Україна', u'Česká republika'
@@ -93,14 +82,14 @@ def test_sequence_string_types_without_strings(string_input):
@pytest.mark.parametrize(
'seq',
- ([], (), {}, set(), frozenset(), IterableStub()),
+ ([], (), {}, set(), frozenset()),
)
def test_iterable_positive(seq):
assert is_iterable(seq)
@pytest.mark.parametrize(
- 'seq', (IteratorStub(), object(), 5, 9.)
+ 'seq', (object(), 5, 9.)
)
def test_iterable_negative(seq):
assert not is_iterable(seq)
diff --git a/test/units/module_utils/common/text/converters/test_json_encode_fallback.py b/test/units/module_utils/common/text/converters/test_json_encode_fallback.py
index 022f38f..808bf41 100644
--- a/test/units/module_utils/common/text/converters/test_json_encode_fallback.py
+++ b/test/units/module_utils/common/text/converters/test_json_encode_fallback.py
@@ -20,12 +20,6 @@ class timezone(tzinfo):
def utcoffset(self, dt):
return self._offset
- def dst(self, dt):
- return timedelta(0)
-
- def tzname(self, dt):
- return None
-
@pytest.mark.parametrize(
'test_input,expected',
diff --git a/test/units/module_utils/common/validation/test_check_missing_parameters.py b/test/units/module_utils/common/validation/test_check_missing_parameters.py
index 6cbcb8b..364f943 100644
--- a/test/units/module_utils/common/validation/test_check_missing_parameters.py
+++ b/test/units/module_utils/common/validation/test_check_missing_parameters.py
@@ -8,16 +8,10 @@ __metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
-from ansible.module_utils.common.validation import check_required_one_of
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.validation import check_missing_parameters
-@pytest.fixture
-def arguments_terms():
- return {"path": ""}
-
-
def test_check_missing_parameters():
assert check_missing_parameters([], {}) == []
diff --git a/test/units/module_utils/common/validation/test_check_mutually_exclusive.py b/test/units/module_utils/common/validation/test_check_mutually_exclusive.py
index 7bf9076..acc67be 100644
--- a/test/units/module_utils/common/validation/test_check_mutually_exclusive.py
+++ b/test/units/module_utils/common/validation/test_check_mutually_exclusive.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.validation import check_mutually_exclusive
diff --git a/test/units/module_utils/common/validation/test_check_required_arguments.py b/test/units/module_utils/common/validation/test_check_required_arguments.py
index 1dd5458..eb3d52e 100644
--- a/test/units/module_utils/common/validation/test_check_required_arguments.py
+++ b/test/units/module_utils/common/validation/test_check_required_arguments.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.validation import check_required_arguments
diff --git a/test/units/module_utils/common/validation/test_check_required_by.py b/test/units/module_utils/common/validation/test_check_required_by.py
index 62cccff..fcba0c1 100644
--- a/test/units/module_utils/common/validation/test_check_required_by.py
+++ b/test/units/module_utils/common/validation/test_check_required_by.py
@@ -8,7 +8,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.validation import check_required_by
diff --git a/test/units/module_utils/common/validation/test_check_required_if.py b/test/units/module_utils/common/validation/test_check_required_if.py
index 4189164..4590b05 100644
--- a/test/units/module_utils/common/validation/test_check_required_if.py
+++ b/test/units/module_utils/common/validation/test_check_required_if.py
@@ -8,7 +8,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.validation import check_required_if
diff --git a/test/units/module_utils/common/validation/test_check_required_one_of.py b/test/units/module_utils/common/validation/test_check_required_one_of.py
index b081889..efdba53 100644
--- a/test/units/module_utils/common/validation/test_check_required_one_of.py
+++ b/test/units/module_utils/common/validation/test_check_required_one_of.py
@@ -8,7 +8,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.validation import check_required_one_of
diff --git a/test/units/module_utils/common/validation/test_check_required_together.py b/test/units/module_utils/common/validation/test_check_required_together.py
index 8a2daab..cf4626a 100644
--- a/test/units/module_utils/common/validation/test_check_required_together.py
+++ b/test/units/module_utils/common/validation/test_check_required_together.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.validation import check_required_together
diff --git a/test/units/module_utils/common/validation/test_check_type_bits.py b/test/units/module_utils/common/validation/test_check_type_bits.py
index 7f6b11d..aa91da9 100644
--- a/test/units/module_utils/common/validation/test_check_type_bits.py
+++ b/test/units/module_utils/common/validation/test_check_type_bits.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.validation import check_type_bits
diff --git a/test/units/module_utils/common/validation/test_check_type_bool.py b/test/units/module_utils/common/validation/test_check_type_bool.py
index bd867dc..00b785f 100644
--- a/test/units/module_utils/common/validation/test_check_type_bool.py
+++ b/test/units/module_utils/common/validation/test_check_type_bool.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.validation import check_type_bool
diff --git a/test/units/module_utils/common/validation/test_check_type_bytes.py b/test/units/module_utils/common/validation/test_check_type_bytes.py
index 6ff62dc..c29e42f 100644
--- a/test/units/module_utils/common/validation/test_check_type_bytes.py
+++ b/test/units/module_utils/common/validation/test_check_type_bytes.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.validation import check_type_bytes
diff --git a/test/units/module_utils/common/validation/test_check_type_float.py b/test/units/module_utils/common/validation/test_check_type_float.py
index 57837fa..a021887 100644
--- a/test/units/module_utils/common/validation/test_check_type_float.py
+++ b/test/units/module_utils/common/validation/test_check_type_float.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.validation import check_type_float
diff --git a/test/units/module_utils/common/validation/test_check_type_int.py b/test/units/module_utils/common/validation/test_check_type_int.py
index 22cedf6..6f4dc6a 100644
--- a/test/units/module_utils/common/validation/test_check_type_int.py
+++ b/test/units/module_utils/common/validation/test_check_type_int.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.validation import check_type_int
diff --git a/test/units/module_utils/common/validation/test_check_type_jsonarg.py b/test/units/module_utils/common/validation/test_check_type_jsonarg.py
index e78e54b..d43bb03 100644
--- a/test/units/module_utils/common/validation/test_check_type_jsonarg.py
+++ b/test/units/module_utils/common/validation/test_check_type_jsonarg.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.validation import check_type_jsonarg
diff --git a/test/units/module_utils/common/validation/test_check_type_str.py b/test/units/module_utils/common/validation/test_check_type_str.py
index f10dad2..71af2a0 100644
--- a/test/units/module_utils/common/validation/test_check_type_str.py
+++ b/test/units/module_utils/common/validation/test_check_type_str.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.validation import check_type_str
diff --git a/test/units/module_utils/compat/__init__.py b/test/units/module_utils/compat/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/units/module_utils/compat/__init__.py
diff --git a/test/units/module_utils/compat/test_datetime.py b/test/units/module_utils/compat/test_datetime.py
new file mode 100644
index 0000000..66a0ad0
--- /dev/null
+++ b/test/units/module_utils/compat/test_datetime.py
@@ -0,0 +1,34 @@
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import datetime
+
+from ansible.module_utils.compat.datetime import utcnow, utcfromtimestamp, UTC
+from ansible.module_utils.six import PY3
+
+
+def test_utc():
+ assert UTC.tzname(None) == 'UTC'
+ assert UTC.utcoffset(None) == datetime.timedelta(0)
+
+ if PY3:
+ assert UTC.dst(None) is None
+ else:
+ assert UTC.dst(None) == datetime.timedelta(0)
+
+
+def test_utcnow():
+ assert utcnow().tzinfo is UTC
+
+
+def test_utcfromtimestamp_zero():
+ dt = utcfromtimestamp(0)
+
+ assert dt.tzinfo is UTC
+ assert dt.year == 1970
+ assert dt.month == 1
+ assert dt.day == 1
+ assert dt.hour == 0
+ assert dt.minute == 0
+ assert dt.second == 0
+ assert dt.microsecond == 0
diff --git a/test/units/module_utils/conftest.py b/test/units/module_utils/conftest.py
index 8bc13c4..8e82bf2 100644
--- a/test/units/module_utils/conftest.py
+++ b/test/units/module_utils/conftest.py
@@ -12,8 +12,8 @@ import pytest
import ansible.module_utils.basic
from ansible.module_utils.six import PY3, string_types
-from ansible.module_utils._text import to_bytes
-from ansible.module_utils.common._collections_compat import MutableMapping
+from ansible.module_utils.common.text.converters import to_bytes
+from ansible.module_utils.six.moves.collections_abc import MutableMapping
@pytest.fixture
diff --git a/test/units/module_utils/facts/base.py b/test/units/module_utils/facts/base.py
index 33d3087..3cada8f 100644
--- a/test/units/module_utils/facts/base.py
+++ b/test/units/module_utils/facts/base.py
@@ -48,6 +48,9 @@ class BaseFactsTest(unittest.TestCase):
@patch('platform.system', return_value='Linux')
@patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value='systemd')
def test_collect(self, mock_gfc, mock_ps):
+ self._test_collect()
+
+ def _test_collect(self):
module = self._mock_module()
fact_collector = self.collector_class()
facts_dict = fact_collector.collect(module=module, collected_facts=self.collected_facts)
@@ -62,4 +65,3 @@ class BaseFactsTest(unittest.TestCase):
facts_dict = fact_collector.collect_with_namespace(module=module,
collected_facts=self.collected_facts)
self.assertIsInstance(facts_dict, dict)
- return facts_dict
diff --git a/test/units/module_utils/facts/fixtures/cpuinfo/s390x-z13-2cpu-cpuinfo b/test/units/module_utils/facts/fixtures/cpuinfo/s390x-z13-2cpu-cpuinfo
new file mode 100644
index 0000000..32e183f
--- /dev/null
+++ b/test/units/module_utils/facts/fixtures/cpuinfo/s390x-z13-2cpu-cpuinfo
@@ -0,0 +1,14 @@
+vendor_id : IBM/S390
+# processors : 2
+bogomips per cpu: 3033.00
+max thread id : 0
+features : esan3 zarch stfle msa ldisp eimm dfp edat etf3eh highgprs te vx sie
+facilities : 0 1 2 3 4 6 7 8 9 10 12 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 30 31 32 33 34 35 36 37 40 41 42 43 44 45 46 47 48 49 50 51 52 53 55 57 73 74 75 76 77 80 81 82 128 129 131
+cache0 : level=1 type=Data scope=Private size=128K line_size=256 associativity=8
+cache1 : level=1 type=Instruction scope=Private size=96K line_size=256 associativity=6
+cache2 : level=2 type=Data scope=Private size=2048K line_size=256 associativity=8
+cache3 : level=2 type=Instruction scope=Private size=2048K line_size=256 associativity=8
+cache4 : level=3 type=Unified scope=Shared size=65536K line_size=256 associativity=16
+cache5 : level=4 type=Unified scope=Shared size=491520K line_size=256 associativity=30
+processor 0: version = FF, identification = FFFFFF, machine = 2964
+processor 1: version = FF, identification = FFFFFF, machine = 2964
diff --git a/test/units/module_utils/facts/fixtures/cpuinfo/s390x-z14-64cpu-cpuinfo b/test/units/module_utils/facts/fixtures/cpuinfo/s390x-z14-64cpu-cpuinfo
new file mode 100644
index 0000000..79fe5a9
--- /dev/null
+++ b/test/units/module_utils/facts/fixtures/cpuinfo/s390x-z14-64cpu-cpuinfo
@@ -0,0 +1,1037 @@
+vendor_id : IBM/S390
+# processors : 64
+bogomips per cpu: 21881.00
+max thread id : 1
+features : esan3 zarch stfle msa ldisp eimm dfp edat etf3eh highgprs te vx vxd vxe gs sie
+facilities : 0 1 2 3 4 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 30 31 32 33 34 35 36 37 38 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 57 58 59 60 64 65 66 67 68 69 70 71 72 73 75 76 77 78 80 81 82 128 129 130 131 132 133 134 135 138 139 141 142 144 145 146 156
+cache0 : level=1 type=Data scope=Private size=128K line_size=256 associativity=8
+cache1 : level=1 type=Instruction scope=Private size=128K line_size=256 associativity=8
+cache2 : level=2 type=Data scope=Private size=4096K line_size=256 associativity=8
+cache3 : level=2 type=Instruction scope=Private size=2048K line_size=256 associativity=8
+cache4 : level=3 type=Unified scope=Shared size=131072K line_size=256 associativity=32
+cache5 : level=4 type=Unified scope=Shared size=688128K line_size=256 associativity=42
+processor 0: version = 00, identification = FFFFFF, machine = 3906
+processor 1: version = 00, identification = FFFFFF, machine = 3906
+processor 2: version = 00, identification = FFFFFF, machine = 3906
+processor 3: version = 00, identification = FFFFFF, machine = 3906
+processor 4: version = 00, identification = FFFFFF, machine = 3906
+processor 5: version = 00, identification = FFFFFF, machine = 3906
+processor 6: version = 00, identification = FFFFFF, machine = 3906
+processor 7: version = 00, identification = FFFFFF, machine = 3906
+processor 8: version = 00, identification = FFFFFF, machine = 3906
+processor 9: version = 00, identification = FFFFFF, machine = 3906
+processor 10: version = 00, identification = FFFFFF, machine = 3906
+processor 11: version = 00, identification = FFFFFF, machine = 3906
+processor 12: version = 00, identification = FFFFFF, machine = 3906
+processor 13: version = 00, identification = FFFFFF, machine = 3906
+processor 14: version = 00, identification = FFFFFF, machine = 3906
+processor 15: version = 00, identification = FFFFFF, machine = 3906
+processor 16: version = 00, identification = FFFFFF, machine = 3906
+processor 17: version = 00, identification = FFFFFF, machine = 3906
+processor 18: version = 00, identification = FFFFFF, machine = 3906
+processor 19: version = 00, identification = FFFFFF, machine = 3906
+processor 20: version = 00, identification = FFFFFF, machine = 3906
+processor 21: version = 00, identification = FFFFFF, machine = 3906
+processor 22: version = 00, identification = FFFFFF, machine = 3906
+processor 23: version = 00, identification = FFFFFF, machine = 3906
+processor 24: version = 00, identification = FFFFFF, machine = 3906
+processor 25: version = 00, identification = FFFFFF, machine = 3906
+processor 26: version = 00, identification = FFFFFF, machine = 3906
+processor 27: version = 00, identification = FFFFFF, machine = 3906
+processor 28: version = 00, identification = FFFFFF, machine = 3906
+processor 29: version = 00, identification = FFFFFF, machine = 3906
+processor 30: version = 00, identification = FFFFFF, machine = 3906
+processor 31: version = 00, identification = FFFFFF, machine = 3906
+processor 32: version = 00, identification = FFFFFF, machine = 3906
+processor 33: version = 00, identification = FFFFFF, machine = 3906
+processor 34: version = 00, identification = FFFFFF, machine = 3906
+processor 35: version = 00, identification = FFFFFF, machine = 3906
+processor 36: version = 00, identification = FFFFFF, machine = 3906
+processor 37: version = 00, identification = FFFFFF, machine = 3906
+processor 38: version = 00, identification = FFFFFF, machine = 3906
+processor 39: version = 00, identification = FFFFFF, machine = 3906
+processor 40: version = 00, identification = FFFFFF, machine = 3906
+processor 41: version = 00, identification = FFFFFF, machine = 3906
+processor 42: version = 00, identification = FFFFFF, machine = 3906
+processor 43: version = 00, identification = FFFFFF, machine = 3906
+processor 44: version = 00, identification = FFFFFF, machine = 3906
+processor 45: version = 00, identification = FFFFFF, machine = 3906
+processor 46: version = 00, identification = FFFFFF, machine = 3906
+processor 47: version = 00, identification = FFFFFF, machine = 3906
+processor 48: version = 00, identification = FFFFFF, machine = 3906
+processor 49: version = 00, identification = FFFFFF, machine = 3906
+processor 50: version = 00, identification = FFFFFF, machine = 3906
+processor 51: version = 00, identification = FFFFFF, machine = 3906
+processor 52: version = 00, identification = FFFFFF, machine = 3906
+processor 53: version = 00, identification = FFFFFF, machine = 3906
+processor 54: version = 00, identification = FFFFFF, machine = 3906
+processor 55: version = 00, identification = FFFFFF, machine = 3906
+processor 56: version = 00, identification = FFFFFF, machine = 3906
+processor 57: version = 00, identification = FFFFFF, machine = 3906
+processor 58: version = 00, identification = FFFFFF, machine = 3906
+processor 59: version = 00, identification = FFFFFF, machine = 3906
+processor 60: version = 00, identification = FFFFFF, machine = 3906
+processor 61: version = 00, identification = FFFFFF, machine = 3906
+processor 62: version = 00, identification = FFFFFF, machine = 3906
+processor 63: version = 00, identification = FFFFFF, machine = 3906
+
+cpu number : 0
+physical id : 1
+core id : 0
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 0
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 1
+physical id : 1
+core id : 0
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 1
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 2
+physical id : 1
+core id : 1
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 2
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 3
+physical id : 1
+core id : 1
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 3
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 4
+physical id : 1
+core id : 2
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 4
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 5
+physical id : 1
+core id : 2
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 5
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 6
+physical id : 1
+core id : 3
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 6
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 7
+physical id : 1
+core id : 3
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 7
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 8
+physical id : 1
+core id : 4
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 8
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 9
+physical id : 1
+core id : 4
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 9
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 10
+physical id : 1
+core id : 5
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 10
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 11
+physical id : 1
+core id : 5
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 11
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 12
+physical id : 1
+core id : 6
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 12
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 13
+physical id : 1
+core id : 6
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 13
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 14
+physical id : 2
+core id : 7
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 14
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 15
+physical id : 2
+core id : 7
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 15
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 16
+physical id : 2
+core id : 8
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 16
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 17
+physical id : 2
+core id : 8
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 17
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 18
+physical id : 2
+core id : 9
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 18
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 19
+physical id : 2
+core id : 9
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 19
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 20
+physical id : 2
+core id : 10
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 20
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 21
+physical id : 2
+core id : 10
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 21
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 22
+physical id : 2
+core id : 11
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 22
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 23
+physical id : 2
+core id : 11
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 23
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 24
+physical id : 2
+core id : 12
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 24
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 25
+physical id : 2
+core id : 12
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 25
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 26
+physical id : 2
+core id : 13
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 26
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 27
+physical id : 2
+core id : 13
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 27
+siblings : 14
+cpu cores : 7
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 28
+physical id : 3
+core id : 14
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 28
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 29
+physical id : 3
+core id : 14
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 29
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 30
+physical id : 3
+core id : 15
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 30
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 31
+physical id : 3
+core id : 15
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 31
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 32
+physical id : 3
+core id : 16
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 32
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 33
+physical id : 3
+core id : 16
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 33
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 34
+physical id : 3
+core id : 17
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 34
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 35
+physical id : 3
+core id : 17
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 35
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 36
+physical id : 3
+core id : 18
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 36
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 37
+physical id : 3
+core id : 18
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 37
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 38
+physical id : 3
+core id : 19
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 38
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 39
+physical id : 3
+core id : 19
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 39
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 40
+physical id : 3
+core id : 20
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 40
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 41
+physical id : 3
+core id : 20
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 41
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 42
+physical id : 3
+core id : 21
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 42
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 43
+physical id : 3
+core id : 21
+book id : 1
+drawer id : 4
+dedicated : 0
+address : 43
+siblings : 16
+cpu cores : 8
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 44
+physical id : 1
+core id : 22
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 44
+siblings : 12
+cpu cores : 6
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 45
+physical id : 1
+core id : 22
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 45
+siblings : 12
+cpu cores : 6
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 46
+physical id : 1
+core id : 23
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 46
+siblings : 12
+cpu cores : 6
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 47
+physical id : 1
+core id : 23
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 47
+siblings : 12
+cpu cores : 6
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 48
+physical id : 1
+core id : 24
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 48
+siblings : 12
+cpu cores : 6
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 49
+physical id : 1
+core id : 24
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 49
+siblings : 12
+cpu cores : 6
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 50
+physical id : 1
+core id : 25
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 50
+siblings : 12
+cpu cores : 6
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 51
+physical id : 1
+core id : 25
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 51
+siblings : 12
+cpu cores : 6
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 52
+physical id : 1
+core id : 26
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 52
+siblings : 12
+cpu cores : 6
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 53
+physical id : 1
+core id : 26
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 53
+siblings : 12
+cpu cores : 6
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 54
+physical id : 1
+core id : 27
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 54
+siblings : 12
+cpu cores : 6
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 55
+physical id : 1
+core id : 27
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 55
+siblings : 12
+cpu cores : 6
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 56
+physical id : 2
+core id : 28
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 56
+siblings : 8
+cpu cores : 4
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 57
+physical id : 2
+core id : 28
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 57
+siblings : 8
+cpu cores : 4
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 58
+physical id : 2
+core id : 29
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 58
+siblings : 8
+cpu cores : 4
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 59
+physical id : 2
+core id : 29
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 59
+siblings : 8
+cpu cores : 4
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 60
+physical id : 2
+core id : 30
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 60
+siblings : 8
+cpu cores : 4
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 61
+physical id : 2
+core id : 30
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 61
+siblings : 8
+cpu cores : 4
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 62
+physical id : 2
+core id : 31
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 62
+siblings : 8
+cpu cores : 4
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
+cpu number : 63
+physical id : 2
+core id : 31
+book id : 2
+drawer id : 4
+dedicated : 0
+address : 63
+siblings : 8
+cpu cores : 4
+version : 00
+identification : FFFFFF
+machine : 3906
+cpu MHz dynamic : 5208
+cpu MHz static : 5208
+
diff --git a/test/units/module_utils/facts/hardware/linux_data.py b/test/units/module_utils/facts/hardware/linux_data.py
index 3879188..f92f14e 100644
--- a/test/units/module_utils/facts/hardware/linux_data.py
+++ b/test/units/module_utils/facts/hardware/linux_data.py
@@ -18,6 +18,12 @@ __metaclass__ = type
import os
+
+def read_lines(path):
+ with open(path) as file:
+ return file.readlines()
+
+
LSBLK_OUTPUT = b"""
/dev/sda
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
@@ -368,7 +374,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'armv61',
'nproc_out': 1,
'sched_getaffinity': set([0]),
- 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv6-rev7-1cpu-cpuinfo')).readlines(),
+ 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv6-rev7-1cpu-cpuinfo')),
'expected_result': {
'processor': ['0', 'ARMv6-compatible processor rev 7 (v6l)'],
'processor_cores': 1,
@@ -381,7 +387,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'armv71',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
- 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv7-rev4-4cpu-cpuinfo')).readlines(),
+ 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv7-rev4-4cpu-cpuinfo')),
'expected_result': {
'processor': [
'0', 'ARMv7 Processor rev 4 (v7l)',
@@ -399,7 +405,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'aarch64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
- 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/aarch64-4cpu-cpuinfo')).readlines(),
+ 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/aarch64-4cpu-cpuinfo')),
'expected_result': {
'processor': [
'0', 'AArch64 Processor rev 4 (aarch64)',
@@ -417,7 +423,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'x86_64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
- 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-4cpu-cpuinfo')).readlines(),
+ 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-4cpu-cpuinfo')),
'expected_result': {
'processor': [
'0', 'AuthenticAMD', 'Dual-Core AMD Opteron(tm) Processor 2216',
@@ -435,7 +441,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'x86_64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
- 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-8cpu-cpuinfo')).readlines(),
+ 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-8cpu-cpuinfo')),
'expected_result': {
'processor': [
'0', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
@@ -457,7 +463,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'arm64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
- 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/arm64-4cpu-cpuinfo')).readlines(),
+ 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/arm64-4cpu-cpuinfo')),
'expected_result': {
'processor': ['0', '1', '2', '3'],
'processor_cores': 1,
@@ -470,7 +476,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'armv71',
'nproc_out': 8,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7]),
- 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv7-rev3-8cpu-cpuinfo')).readlines(),
+ 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv7-rev3-8cpu-cpuinfo')),
'expected_result': {
'processor': [
'0', 'ARMv7 Processor rev 3 (v7l)',
@@ -492,7 +498,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'x86_64',
'nproc_out': 2,
'sched_getaffinity': set([0, 1]),
- 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-2cpu-cpuinfo')).readlines(),
+ 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-2cpu-cpuinfo')),
'expected_result': {
'processor': [
'0', 'GenuineIntel', 'Intel(R) Xeon(R) CPU E5-2680 v2 @ 2.80GHz',
@@ -505,7 +511,7 @@ CPU_INFO_TEST_SCENARIOS = [
'processor_vcpus': 2},
},
{
- 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/ppc64-power7-rhel7-8cpu-cpuinfo')).readlines(),
+ 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/ppc64-power7-rhel7-8cpu-cpuinfo')),
'architecture': 'ppc64',
'nproc_out': 8,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7]),
@@ -528,7 +534,7 @@ CPU_INFO_TEST_SCENARIOS = [
},
},
{
- 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/ppc64le-power8-24cpu-cpuinfo')).readlines(),
+ 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/ppc64le-power8-24cpu-cpuinfo')),
'architecture': 'ppc64le',
'nproc_out': 24,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]),
@@ -567,7 +573,41 @@ CPU_INFO_TEST_SCENARIOS = [
},
},
{
- 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/sparc-t5-debian-ldom-24vcpu')).readlines(),
+ 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/s390x-z13-2cpu-cpuinfo')),
+ 'architecture': 's390x',
+ 'nproc_out': 2,
+ 'sched_getaffinity': set([0, 1]),
+ 'expected_result': {
+ 'processor': [
+ 'IBM/S390',
+ ],
+ 'processor_cores': 2,
+ 'processor_count': 1,
+ 'processor_nproc': 2,
+ 'processor_threads_per_core': 1,
+ 'processor_vcpus': 2
+ },
+ },
+ {
+ 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/s390x-z14-64cpu-cpuinfo')),
+ 'architecture': 's390x',
+ 'nproc_out': 64,
+ 'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
+ 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63]),
+ 'expected_result': {
+ 'processor': [
+ 'IBM/S390',
+ ],
+ 'processor_cores': 32,
+ 'processor_count': 1,
+ 'processor_nproc': 64,
+ 'processor_threads_per_core': 2,
+ 'processor_vcpus': 64
+ },
+ },
+ {
+ 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/sparc-t5-debian-ldom-24vcpu')),
'architecture': 'sparc64',
'nproc_out': 24,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]),
diff --git a/test/units/module_utils/facts/hardware/test_linux_get_cpu_info.py b/test/units/module_utils/facts/hardware/test_linux_get_cpu_info.py
index aea8694..4167434 100644
--- a/test/units/module_utils/facts/hardware/test_linux_get_cpu_info.py
+++ b/test/units/module_utils/facts/hardware/test_linux_get_cpu_info.py
@@ -45,7 +45,7 @@ def test_get_cpu_info_missing_arch(mocker):
module = mocker.Mock()
inst = linux.LinuxHardware(module)
- # ARM and Power will report incorrect processor count if architecture is not available
+ # ARM, Power, and zSystems will report incorrect processor count if architecture is not available
mocker.patch('os.path.exists', return_value=False)
mocker.patch('os.access', return_value=True)
for test in CPU_INFO_TEST_SCENARIOS:
@@ -56,7 +56,7 @@ def test_get_cpu_info_missing_arch(mocker):
test_result = inst.get_cpu_facts()
- if test['architecture'].startswith(('armv', 'aarch', 'ppc')):
+ if test['architecture'].startswith(('armv', 'aarch', 'ppc', 's390')):
assert test['expected_result'] != test_result
else:
assert test['expected_result'] == test_result
diff --git a/test/units/module_utils/facts/network/test_locally_reachable_ips.py b/test/units/module_utils/facts/network/test_locally_reachable_ips.py
new file mode 100644
index 0000000..7eac790
--- /dev/null
+++ b/test/units/module_utils/facts/network/test_locally_reachable_ips.py
@@ -0,0 +1,93 @@
+# This file is part of Ansible
+# -*- coding: utf-8 -*-
+#
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from units.compat.mock import Mock
+from units.compat import unittest
+from ansible.module_utils.facts.network import linux
+
+# ip -4 route show table local
+IP4_ROUTE_SHOW_LOCAL = """
+broadcast 127.0.0.0 dev lo proto kernel scope link src 127.0.0.1
+local 127.0.0.0/8 dev lo proto kernel scope host src 127.0.0.1
+local 127.0.0.1 dev lo proto kernel scope host src 127.0.0.1
+broadcast 127.255.255.255 dev lo proto kernel scope link src 127.0.0.1
+local 192.168.1.0/24 dev lo scope host
+"""
+
+# ip -6 route show table local
+IP6_ROUTE_SHOW_LOCAL = """
+local ::1 dev lo proto kernel metric 0 pref medium
+local 2a02:123:3:1::e dev enp94s0f0np0 proto kernel metric 0 pref medium
+local 2a02:123:15::/48 dev lo metric 1024 pref medium
+local 2a02:123:16::/48 dev lo metric 1024 pref medium
+local fe80::2eea:7fff:feca:fe68 dev enp94s0f0np0 proto kernel metric 0 pref medium
+multicast ff00::/8 dev enp94s0f0np0 proto kernel metric 256 pref medium
+"""
+
+# Expected dict returned by get_locally_reachable_ips()
+IP_ROUTE_SHOW_LOCAL_EXPECTED = {
+ 'ipv4': [
+ '127.0.0.0/8',
+ '127.0.0.1',
+ '192.168.1.0/24'
+ ],
+ 'ipv6': [
+ '::1',
+ '2a02:123:3:1::e',
+ '2a02:123:15::/48',
+ '2a02:123:16::/48',
+ 'fe80::2eea:7fff:feca:fe68'
+ ]
+}
+
+
+class TestLocalRoutesLinux(unittest.TestCase):
+ gather_subset = ['all']
+
+ def get_bin_path(self, command):
+ if command == 'ip':
+ return 'fake/ip'
+ return None
+
+ def run_command(self, command):
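+        # Return canned `ip route show table local` output for the fake `ip` binary;
+        # any other command is treated as a failure.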
+ if command == ['fake/ip', '-4', 'route', 'show', 'table', 'local']:
+ return 0, IP4_ROUTE_SHOW_LOCAL, ''
+ if command == ['fake/ip', '-6', 'route', 'show', 'table', 'local']:
+ return 0, IP6_ROUTE_SHOW_LOCAL, ''
+ return 1, '', ''
+
+ def test(self):
+ module = self._mock_module()
+ module.get_bin_path.side_effect = self.get_bin_path
+ module.run_command.side_effect = self.run_command
+
+ net = linux.LinuxNetwork(module)
+ res = net.get_locally_reachable_ips('fake/ip')
+ self.assertDictEqual(res, IP_ROUTE_SHOW_LOCAL_EXPECTED)
+
+ def _mock_module(self):
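+        # Minimal AnsibleModule stand-in carrying the fact-gathering params the network collector expects.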
+ mock_module = Mock()
+ mock_module.params = {'gather_subset': self.gather_subset,
+ 'gather_timeout': 5,
+ 'filter': '*'}
+ mock_module.get_bin_path = Mock(return_value=None)
+ return mock_module
diff --git a/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_ClearLinux.py b/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_ClearLinux.py
index c095756..6667ada 100644
--- a/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_ClearLinux.py
+++ b/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_ClearLinux.py
@@ -21,7 +21,8 @@ def test_input():
def test_parse_distribution_file_clear_linux(mock_module, test_input):
- test_input['data'] = open(os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files/ClearLinux')).read()
+ with open(os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files/ClearLinux')) as file:
+ test_input['data'] = file.read()
result = (
True,
@@ -43,7 +44,8 @@ def test_parse_distribution_file_clear_linux_no_match(mock_module, distro_file,
Test against data from Linux Mint and CoreOS to ensure we do not get a reported
match from parse_distribution_file_ClearLinux()
"""
- test_input['data'] = open(os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files', distro_file)).read()
+ with open(os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files', distro_file)) as file:
+ test_input['data'] = file.read()
result = (False, {})
diff --git a/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_Slackware.py b/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_Slackware.py
index 53fd4ea..efb937e 100644
--- a/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_Slackware.py
+++ b/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_Slackware.py
@@ -19,9 +19,12 @@ from ansible.module_utils.facts.system.distribution import DistributionFiles
)
)
def test_parse_distribution_file_slackware(mock_module, distro_file, expected_version):
+ with open(os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files', distro_file)) as file:
+ data = file.read()
+
test_input = {
'name': 'Slackware',
- 'data': open(os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files', distro_file)).read(),
+ 'data': data,
'path': '/etc/os-release',
'collected_facts': None,
}
diff --git a/test/units/module_utils/facts/system/test_pkg_mgr.py b/test/units/module_utils/facts/system/test_pkg_mgr.py
new file mode 100644
index 0000000..8dc1a3b
--- /dev/null
+++ b/test/units/module_utils/facts/system/test_pkg_mgr.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+# Copyright: (c) 2023, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.module_utils.facts.system.pkg_mgr import PkgMgrFactCollector
+
+
+_FEDORA_FACTS = {
+ "ansible_distribution": "Fedora",
+ "ansible_distribution_major_version": 38, # any version where yum isn't default
+ "ansible_os_family": "RedHat"
+}
+
+_KYLIN_FACTS = {
+ "ansible_distribution": "Kylin Linux Advanced Server",
+ "ansible_distribution_major_version": "V10",
+ "ansible_os_family": "RedHat"
+}
+
+# NOTE pkg_mgr == "dnf" means the dnf module for dnf 4 or below
+
+
+def test_default_dnf_version_detection_kylin_dnf4(mocker):
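+    # /usr/bin/dnf resolves to /usr/bin/dnf-3, so the collector should report the dnf 4 backend ("dnf").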
+ mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/dnf", "/usr/bin/dnf-3"))
+ mocker.patch("os.path.realpath", lambda p: {"/usr/bin/dnf": "/usr/bin/dnf-3"}.get(p, p))
+ assert PkgMgrFactCollector().collect(collected_facts=_KYLIN_FACTS).get("pkg_mgr") == "dnf"
+
+
+def test_default_dnf_version_detection_fedora_dnf4(mocker):
+ mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/dnf", "/usr/bin/dnf-3"))
+ mocker.patch("os.path.realpath", lambda p: {"/usr/bin/dnf": "/usr/bin/dnf-3"}.get(p, p))
+ assert PkgMgrFactCollector().collect(collected_facts=_FEDORA_FACTS).get("pkg_mgr") == "dnf"
+
+
+def test_default_dnf_version_detection_fedora_dnf5(mocker):
+ mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/dnf", "/usr/bin/dnf5"))
+ mocker.patch("os.path.realpath", lambda p: {"/usr/bin/dnf": "/usr/bin/dnf5"}.get(p, p))
+ assert PkgMgrFactCollector().collect(collected_facts=_FEDORA_FACTS).get("pkg_mgr") == "dnf5"
+
+
+def test_default_dnf_version_detection_fedora_dnf4_both_installed(mocker):
+ mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/dnf", "/usr/bin/dnf-3", "/usr/bin/dnf5"))
+ mocker.patch("os.path.realpath", lambda p: {"/usr/bin/dnf": "/usr/bin/dnf-3"}.get(p, p))
+ assert PkgMgrFactCollector().collect(collected_facts=_FEDORA_FACTS).get("pkg_mgr") == "dnf"
+
+
+def test_default_dnf_version_detection_fedora_dnf4_microdnf5_installed(mocker):
+ mocker.patch(
+ "os.path.exists",
+ lambda p: p in ("/usr/bin/dnf", "/usr/bin/microdnf", "/usr/bin/dnf-3", "/usr/bin/dnf5")
+ )
+ mocker.patch(
+ "os.path.realpath",
+ lambda p: {"/usr/bin/dnf": "/usr/bin/dnf-3", "/usr/bin/microdnf": "/usr/bin/dnf5"}.get(p, p)
+ )
+ assert PkgMgrFactCollector().collect(collected_facts=_FEDORA_FACTS).get("pkg_mgr") == "dnf"
+
+
+def test_default_dnf_version_detection_fedora_dnf4_microdnf(mocker):
+ mocker.patch("os.path.exists", lambda p: p == "/usr/bin/microdnf")
+ assert PkgMgrFactCollector().collect(collected_facts=_FEDORA_FACTS).get("pkg_mgr") == "dnf"
+
+
+def test_default_dnf_version_detection_fedora_dnf5_microdnf(mocker):
+ mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/microdnf", "/usr/bin/dnf5"))
+ mocker.patch("os.path.realpath", lambda p: {"/usr/bin/microdnf": "/usr/bin/dnf5"}.get(p, p))
+ assert PkgMgrFactCollector().collect(collected_facts=_FEDORA_FACTS).get("pkg_mgr") == "dnf5"
+
+
+def test_default_dnf_version_detection_fedora_no_default(mocker):
+ mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/dnf-3", "/usr/bin/dnf5"))
+ assert PkgMgrFactCollector().collect(collected_facts=_FEDORA_FACTS).get("pkg_mgr") == "unknown"
diff --git a/test/units/module_utils/facts/test_collectors.py b/test/units/module_utils/facts/test_collectors.py
index c480602..984b585 100644
--- a/test/units/module_utils/facts/test_collectors.py
+++ b/test/units/module_utils/facts/test_collectors.py
@@ -93,7 +93,7 @@ class TestApparmorFacts(BaseFactsTest):
collector_class = ApparmorFactCollector
def test_collect(self):
- facts_dict = super(TestApparmorFacts, self).test_collect()
+ facts_dict = super(TestApparmorFacts, self)._test_collect()
self.assertIn('status', facts_dict['apparmor'])
@@ -191,7 +191,7 @@ class TestEnvFacts(BaseFactsTest):
collector_class = EnvFactCollector
def test_collect(self):
- facts_dict = super(TestEnvFacts, self).test_collect()
+ facts_dict = super(TestEnvFacts, self)._test_collect()
self.assertIn('HOME', facts_dict['env'])
@@ -355,7 +355,6 @@ class TestSelinuxFacts(BaseFactsTest):
facts_dict = fact_collector.collect(module=module)
self.assertIsInstance(facts_dict, dict)
self.assertEqual(facts_dict['selinux']['status'], 'Missing selinux Python library')
- return facts_dict
class TestServiceMgrFacts(BaseFactsTest):
diff --git a/test/units/module_utils/facts/test_date_time.py b/test/units/module_utils/facts/test_date_time.py
index 6abc36a..6cc05f9 100644
--- a/test/units/module_utils/facts/test_date_time.py
+++ b/test/units/module_utils/facts/test_date_time.py
@@ -10,28 +10,27 @@ import datetime
import string
import time
+from ansible.module_utils.compat.datetime import UTC
from ansible.module_utils.facts.system import date_time
EPOCH_TS = 1594449296.123456
DT = datetime.datetime(2020, 7, 11, 12, 34, 56, 124356)
-DT_UTC = datetime.datetime(2020, 7, 11, 2, 34, 56, 124356)
+UTC_DT = datetime.datetime(2020, 7, 11, 2, 34, 56, 124356)
@pytest.fixture
def fake_now(monkeypatch):
"""
- Patch `datetime.datetime.fromtimestamp()`, `datetime.datetime.utcfromtimestamp()`,
+    Patch `datetime.datetime.fromtimestamp()`
and `time.time()` to return deterministic values.
"""
class FakeNow:
@classmethod
- def fromtimestamp(cls, timestamp):
- return DT
-
- @classmethod
- def utcfromtimestamp(cls, timestamp):
- return DT_UTC
+ def fromtimestamp(cls, timestamp, tz=None):
+ if tz == UTC:
+ return UTC_DT.replace(tzinfo=tz)
+ return DT.replace(tzinfo=tz)
def _time():
return EPOCH_TS
diff --git a/test/units/module_utils/facts/test_sysctl.py b/test/units/module_utils/facts/test_sysctl.py
index c369b61..0f1632b 100644
--- a/test/units/module_utils/facts/test_sysctl.py
+++ b/test/units/module_utils/facts/test_sysctl.py
@@ -20,13 +20,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import os
-
-import pytest
-
# for testing
from units.compat import unittest
-from units.compat.mock import patch, MagicMock, mock_open, Mock
+from units.compat.mock import MagicMock
from ansible.module_utils.facts.sysctl import get_sysctl
diff --git a/test/units/module_utils/facts/test_timeout.py b/test/units/module_utils/facts/test_timeout.py
index 2adbc4a..6ba7c39 100644
--- a/test/units/module_utils/facts/test_timeout.py
+++ b/test/units/module_utils/facts/test_timeout.py
@@ -139,7 +139,7 @@ def function_other_timeout():
@timeout.timeout(1)
def function_raises():
- 1 / 0
+ return 1 / 0
@timeout.timeout(1)
diff --git a/test/units/module_utils/test_text.py b/test/units/module_utils/test_text.py
new file mode 100644
index 0000000..72ef2ab
--- /dev/null
+++ b/test/units/module_utils/test_text.py
@@ -0,0 +1,21 @@
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import codecs
+
+from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils.six import PY3, text_type, binary_type
+
+
+def test_exports():
+ """Ensure legacy attributes are exported."""
+
+ from ansible.module_utils import _text
+
+ assert _text.codecs == codecs
+ assert _text.PY3 == PY3
+ assert _text.text_type == text_type
+ assert _text.binary_type == binary_type
+ assert _text.to_bytes == to_bytes
+ assert _text.to_native == to_native
+ assert _text.to_text == to_text
diff --git a/test/units/module_utils/urls/test_Request.py b/test/units/module_utils/urls/test_Request.py
index d2c4ea3..a8bc3a0 100644
--- a/test/units/module_utils/urls/test_Request.py
+++ b/test/units/module_utils/urls/test_Request.py
@@ -33,6 +33,7 @@ def install_opener_mock(mocker):
def test_Request_fallback(urlopen_mock, install_opener_mock, mocker):
here = os.path.dirname(__file__)
pem = os.path.join(here, 'fixtures/client.pem')
+ client_key = os.path.join(here, 'fixtures/client.key')
cookies = cookiejar.CookieJar()
request = Request(
@@ -46,8 +47,8 @@ def test_Request_fallback(urlopen_mock, install_opener_mock, mocker):
http_agent='ansible-tests',
force_basic_auth=True,
follow_redirects='all',
- client_cert='/tmp/client.pem',
- client_key='/tmp/client.key',
+ client_cert=pem,
+ client_key=client_key,
cookies=cookies,
unix_socket='/foo/bar/baz.sock',
ca_path=pem,
@@ -68,8 +69,8 @@ def test_Request_fallback(urlopen_mock, install_opener_mock, mocker):
call(None, 'ansible-tests'), # http_agent
call(None, True), # force_basic_auth
call(None, 'all'), # follow_redirects
- call(None, '/tmp/client.pem'), # client_cert
- call(None, '/tmp/client.key'), # client_key
+ call(None, pem), # client_cert
+ call(None, client_key), # client_key
call(None, cookies), # cookies
call(None, '/foo/bar/baz.sock'), # unix_socket
call(None, pem), # ca_path
@@ -358,10 +359,7 @@ def test_Request_open_client_cert(urlopen_mock, install_opener_mock):
assert ssl_handler.client_cert == client_cert
assert ssl_handler.client_key == client_key
- https_connection = ssl_handler._build_https_connection('ansible.com')
-
- assert https_connection.key_file == client_key
- assert https_connection.cert_file == client_cert
+ ssl_handler._build_https_connection('ansible.com')
def test_Request_open_cookies(urlopen_mock, install_opener_mock):
diff --git a/test/units/module_utils/urls/test_fetch_file.py b/test/units/module_utils/urls/test_fetch_file.py
index ed11227..ecb6b9f 100644
--- a/test/units/module_utils/urls/test_fetch_file.py
+++ b/test/units/module_utils/urls/test_fetch_file.py
@@ -10,7 +10,6 @@ import os
from ansible.module_utils.urls import fetch_file
import pytest
-from units.compat.mock import MagicMock
class FakeTemporaryFile:
diff --git a/test/units/module_utils/urls/test_prepare_multipart.py b/test/units/module_utils/urls/test_prepare_multipart.py
index 226d9ed..ee32047 100644
--- a/test/units/module_utils/urls/test_prepare_multipart.py
+++ b/test/units/module_utils/urls/test_prepare_multipart.py
@@ -7,8 +7,6 @@ __metaclass__ = type
import os
-from io import StringIO
-
from email.message import Message
import pytest
diff --git a/test/units/module_utils/urls/test_urls.py b/test/units/module_utils/urls/test_urls.py
index 69c1b82..f0e5e9e 100644
--- a/test/units/module_utils/urls/test_urls.py
+++ b/test/units/module_utils/urls/test_urls.py
@@ -6,7 +6,7 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils import urls
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
import pytest
diff --git a/test/units/modules/conftest.py b/test/units/modules/conftest.py
index a7d1e04..c60c586 100644
--- a/test/units/modules/conftest.py
+++ b/test/units/modules/conftest.py
@@ -8,24 +8,15 @@ import json
import pytest
-from ansible.module_utils.six import string_types
-from ansible.module_utils._text import to_bytes
-from ansible.module_utils.common._collections_compat import MutableMapping
+from ansible.module_utils.common.text.converters import to_bytes
@pytest.fixture
def patch_ansible_module(request, mocker):
- if isinstance(request.param, string_types):
- args = request.param
- elif isinstance(request.param, MutableMapping):
- if 'ANSIBLE_MODULE_ARGS' not in request.param:
- request.param = {'ANSIBLE_MODULE_ARGS': request.param}
- if '_ansible_remote_tmp' not in request.param['ANSIBLE_MODULE_ARGS']:
- request.param['ANSIBLE_MODULE_ARGS']['_ansible_remote_tmp'] = '/tmp'
- if '_ansible_keep_remote_files' not in request.param['ANSIBLE_MODULE_ARGS']:
- request.param['ANSIBLE_MODULE_ARGS']['_ansible_keep_remote_files'] = False
- args = json.dumps(request.param)
- else:
- raise Exception('Malformed data to the patch_ansible_module pytest fixture')
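+    # Wrap the parametrized args in ANSIBLE_MODULE_ARGS and force the internal defaults modules expect under test.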
+ request.param = {'ANSIBLE_MODULE_ARGS': request.param}
+ request.param['ANSIBLE_MODULE_ARGS']['_ansible_remote_tmp'] = '/tmp'
+ request.param['ANSIBLE_MODULE_ARGS']['_ansible_keep_remote_files'] = False
+
+ args = json.dumps(request.param)
mocker.patch('ansible.module_utils.basic._ANSIBLE_ARGS', to_bytes(args))
diff --git a/test/units/modules/test_apt.py b/test/units/modules/test_apt.py
index 20e056f..a5aa4a9 100644
--- a/test/units/modules/test_apt.py
+++ b/test/units/modules/test_apt.py
@@ -2,20 +2,13 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import collections
-import sys
from units.compat.mock import Mock
from units.compat import unittest
-try:
- from ansible.modules.apt import (
- expand_pkgspec_from_fnmatches,
- )
-except Exception:
- # Need some more module_utils work (porting urls.py) before we can test
- # modules. So don't error out in this case.
- if sys.version_info[0] >= 3:
- pass
+from ansible.modules.apt import (
+ expand_pkgspec_from_fnmatches,
+)
class AptExpandPkgspecTestCase(unittest.TestCase):
@@ -29,25 +22,25 @@ class AptExpandPkgspecTestCase(unittest.TestCase):
]
def test_trivial(self):
- foo = ["apt"]
+ pkg = ["apt"]
self.assertEqual(
- expand_pkgspec_from_fnmatches(None, foo, self.fake_cache), foo)
+ expand_pkgspec_from_fnmatches(None, pkg, self.fake_cache), pkg)
def test_version_wildcard(self):
- foo = ["apt=1.0*"]
+ pkg = ["apt=1.0*"]
self.assertEqual(
- expand_pkgspec_from_fnmatches(None, foo, self.fake_cache), foo)
+ expand_pkgspec_from_fnmatches(None, pkg, self.fake_cache), pkg)
def test_pkgname_wildcard_version_wildcard(self):
- foo = ["apt*=1.0*"]
+ pkg = ["apt*=1.0*"]
m_mock = Mock()
self.assertEqual(
- expand_pkgspec_from_fnmatches(m_mock, foo, self.fake_cache),
+ expand_pkgspec_from_fnmatches(m_mock, pkg, self.fake_cache),
['apt', 'apt-utils'])
def test_pkgname_expands(self):
- foo = ["apt*"]
+ pkg = ["apt*"]
m_mock = Mock()
self.assertEqual(
- expand_pkgspec_from_fnmatches(m_mock, foo, self.fake_cache),
+ expand_pkgspec_from_fnmatches(m_mock, pkg, self.fake_cache),
["apt", "apt-utils"])
diff --git a/test/units/modules/test_async_wrapper.py b/test/units/modules/test_async_wrapper.py
index 37b1fda..dbaf683 100644
--- a/test/units/modules/test_async_wrapper.py
+++ b/test/units/modules/test_async_wrapper.py
@@ -7,26 +7,21 @@ __metaclass__ = type
import os
import json
import shutil
+import sys
import tempfile
-import pytest
-
-from units.compat.mock import patch, MagicMock
from ansible.modules import async_wrapper
-from pprint import pprint
-
class TestAsyncWrapper:
def test_run_module(self, monkeypatch):
def mock_get_interpreter(module_path):
- return ['/usr/bin/python']
+ return [sys.executable]
module_result = {'rc': 0}
module_lines = [
- '#!/usr/bin/python',
'import sys',
'sys.stderr.write("stderr stuff")',
"print('%s')" % json.dumps(module_result)
diff --git a/test/units/modules/test_copy.py b/test/units/modules/test_copy.py
index 20c309b..beeef6d 100644
--- a/test/units/modules/test_copy.py
+++ b/test/units/modules/test_copy.py
@@ -128,16 +128,19 @@ def test_split_pre_existing_dir_working_dir_exists(directory, expected, mocker):
#
# Info helpful for making new test cases:
#
-# base_mode = {'dir no perms': 0o040000,
-# 'file no perms': 0o100000,
-# 'dir all perms': 0o400000 | 0o777,
-# 'file all perms': 0o100000, | 0o777}
+# base_mode = {
+# 'dir no perms': 0o040000,
+# 'file no perms': 0o100000,
+# 'dir all perms': 0o040000 | 0o777,
+# 'file all perms': 0o100000 | 0o777}
#
-# perm_bits = {'x': 0b001,
+# perm_bits = {
+# 'x': 0b001,
# 'w': 0b010,
# 'r': 0b100}
#
-# role_shift = {'u': 6,
+# role_shift = {
+# 'u': 6,
# 'g': 3,
# 'o': 0}
@@ -172,6 +175,10 @@ DATA = ( # Going from no permissions to setting all for user, group, and/or oth
# chmod a-X statfile <== removes execute from statfile
(0o100777, u'a-X', 0o0666),
+    # Verify that X uses the computed mode, not the original mode
+ (0o100777, u'a=,u=rX', 0o0400),
+ (0o040777, u'a=,u=rX', 0o0500),
+
# Multiple permissions
(0o040000, u'u=rw-x+X,g=r-x+X,o=r-x+X', 0o0755),
(0o100000, u'u=rw-x+X,g=r-x+X,o=r-x+X', 0o0644),
@@ -185,6 +192,10 @@ UMASK_DATA = (
INVALID_DATA = (
(0o040000, u'a=foo', "bad symbolic permission for mode: a=foo"),
(0o040000, u'f=rwx', "bad symbolic permission for mode: f=rwx"),
+ (0o100777, u'of=r', "bad symbolic permission for mode: of=r"),
+
+ (0o100777, u'ao=r', "bad symbolic permission for mode: ao=r"),
+ (0o100777, u'oa=r', "bad symbolic permission for mode: oa=r"),
)
diff --git a/test/units/modules/test_hostname.py b/test/units/modules/test_hostname.py
index 9050fd0..1aa4a57 100644
--- a/test/units/modules/test_hostname.py
+++ b/test/units/modules/test_hostname.py
@@ -6,7 +6,6 @@ import shutil
import tempfile
from units.compat.mock import patch, MagicMock, mock_open
-from ansible.module_utils import basic
from ansible.module_utils.common._utils import get_all_subclasses
from ansible.modules import hostname
from units.modules.utils import ModuleTestCase, set_module_args
@@ -44,12 +43,9 @@ class TestHostname(ModuleTestCase):
classname = "%sStrategy" % prefix
cls = getattr(hostname, classname, None)
- if cls is None:
- self.assertFalse(
- cls is None, "%s is None, should be a subclass" % classname
- )
- else:
- self.assertTrue(issubclass(cls, hostname.BaseStrategy))
+ assert cls is not None
+
+ self.assertTrue(issubclass(cls, hostname.BaseStrategy))
class TestRedhatStrategy(ModuleTestCase):
diff --git a/test/units/modules/test_iptables.py b/test/units/modules/test_iptables.py
index 265e770..2459cf7 100644
--- a/test/units/modules/test_iptables.py
+++ b/test/units/modules/test_iptables.py
@@ -181,7 +181,7 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 2)
+ self.assertEqual(run_command.call_count, 1)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -208,7 +208,6 @@ class TestIptables(ModuleTestCase):
commands_results = [
(1, '', ''), # check_rule_present
- (0, '', ''), # check_chain_present
(0, '', ''),
]
@@ -218,7 +217,7 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 3)
+ self.assertEqual(run_command.call_count, 2)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -232,7 +231,7 @@ class TestIptables(ModuleTestCase):
'-j',
'ACCEPT'
])
- self.assertEqual(run_command.call_args_list[2][0][0], [
+ self.assertEqual(run_command.call_args_list[1][0][0], [
'/sbin/iptables',
'-t',
'filter',
@@ -272,7 +271,7 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 2)
+ self.assertEqual(run_command.call_count, 1)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -321,7 +320,7 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 3)
+ self.assertEqual(run_command.call_count, 2)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -343,7 +342,7 @@ class TestIptables(ModuleTestCase):
'--to-ports',
'8600'
])
- self.assertEqual(run_command.call_args_list[2][0][0], [
+ self.assertEqual(run_command.call_args_list[1][0][0], [
'/sbin/iptables',
'-t',
'nat',
@@ -1019,10 +1018,8 @@ class TestIptables(ModuleTestCase):
})
commands_results = [
- (1, '', ''), # check_rule_present
(1, '', ''), # check_chain_present
(0, '', ''), # create_chain
- (0, '', ''), # append_rule
]
with patch.object(basic.AnsibleModule, 'run_command') as run_command:
@@ -1031,32 +1028,20 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 4)
+ self.assertEqual(run_command.call_count, 2)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t', 'filter',
- '-C', 'FOOBAR',
- ])
-
- self.assertEqual(run_command.call_args_list[1][0][0], [
- '/sbin/iptables',
- '-t', 'filter',
'-L', 'FOOBAR',
])
- self.assertEqual(run_command.call_args_list[2][0][0], [
+ self.assertEqual(run_command.call_args_list[1][0][0], [
'/sbin/iptables',
'-t', 'filter',
'-N', 'FOOBAR',
])
- self.assertEqual(run_command.call_args_list[3][0][0], [
- '/sbin/iptables',
- '-t', 'filter',
- '-A', 'FOOBAR',
- ])
-
commands_results = [
(0, '', ''), # check_rule_present
]
@@ -1078,7 +1063,6 @@ class TestIptables(ModuleTestCase):
commands_results = [
(1, '', ''), # check_rule_present
- (1, '', ''), # check_chain_present
]
with patch.object(basic.AnsibleModule, 'run_command') as run_command:
@@ -1087,17 +1071,11 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 2)
+ self.assertEqual(run_command.call_count, 1)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t', 'filter',
- '-C', 'FOOBAR',
- ])
-
- self.assertEqual(run_command.call_args_list[1][0][0], [
- '/sbin/iptables',
- '-t', 'filter',
'-L', 'FOOBAR',
])
diff --git a/test/units/modules/test_known_hosts.py b/test/units/modules/test_known_hosts.py
index 123dd75..667f3e5 100644
--- a/test/units/modules/test_known_hosts.py
+++ b/test/units/modules/test_known_hosts.py
@@ -6,7 +6,7 @@ import tempfile
from ansible.module_utils import basic
from units.compat import unittest
-from ansible.module_utils._text import to_bytes
+from ansible.module_utils.common.text.converters import to_bytes
from ansible.module_utils.basic import AnsibleModule
from ansible.modules.known_hosts import compute_diff, sanity_check
diff --git a/test/units/modules/test_unarchive.py b/test/units/modules/test_unarchive.py
index 3e7a58c..935231b 100644
--- a/test/units/modules/test_unarchive.py
+++ b/test/units/modules/test_unarchive.py
@@ -8,20 +8,6 @@ import pytest
from ansible.modules.unarchive import ZipArchive, TgzArchive
-class AnsibleModuleExit(Exception):
- def __init__(self, *args, **kwargs):
- self.args = args
- self.kwargs = kwargs
-
-
-class ExitJson(AnsibleModuleExit):
- pass
-
-
-class FailJson(AnsibleModuleExit):
- pass
-
-
@pytest.fixture
def fake_ansible_module():
return FakeAnsibleModule()
@@ -32,12 +18,6 @@ class FakeAnsibleModule:
self.params = {}
self.tmpdir = None
- def exit_json(self, *args, **kwargs):
- raise ExitJson(*args, **kwargs)
-
- def fail_json(self, *args, **kwargs):
- raise FailJson(*args, **kwargs)
-
class TestCaseZipArchive:
@pytest.mark.parametrize(
diff --git a/test/units/modules/utils.py b/test/units/modules/utils.py
index 6d169e3..b56229e 100644
--- a/test/units/modules/utils.py
+++ b/test/units/modules/utils.py
@@ -6,14 +6,12 @@ import json
from units.compat import unittest
from units.compat.mock import patch
from ansible.module_utils import basic
-from ansible.module_utils._text import to_bytes
+from ansible.module_utils.common.text.converters import to_bytes
def set_module_args(args):
- if '_ansible_remote_tmp' not in args:
- args['_ansible_remote_tmp'] = '/tmp'
- if '_ansible_keep_remote_files' not in args:
- args['_ansible_keep_remote_files'] = False
+ args['_ansible_remote_tmp'] = '/tmp'
+ args['_ansible_keep_remote_files'] = False
args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
basic._ANSIBLE_ARGS = to_bytes(args)
@@ -28,8 +26,6 @@ class AnsibleFailJson(Exception):
def exit_json(*args, **kwargs):
- if 'changed' not in kwargs:
- kwargs['changed'] = False
raise AnsibleExitJson(kwargs)
diff --git a/test/units/parsing/test_ajson.py b/test/units/parsing/test_ajson.py
index 1b9a76b..bb7bf1a 100644
--- a/test/units/parsing/test_ajson.py
+++ b/test/units/parsing/test_ajson.py
@@ -109,7 +109,11 @@ class TestAnsibleJSONEncoder:
def __len__(self):
return len(self.__dict__)
- return M(request.param)
+ mapping = M(request.param)
+
+ assert isinstance(len(mapping), int) # ensure coverage of __len__
+
+ return mapping
@pytest.fixture
def ansible_json_encoder(self):
diff --git a/test/units/parsing/test_dataloader.py b/test/units/parsing/test_dataloader.py
index 9ec49a8..a7f8b1d 100644
--- a/test/units/parsing/test_dataloader.py
+++ b/test/units/parsing/test_dataloader.py
@@ -25,8 +25,7 @@ from units.compat import unittest
from unittest.mock import patch, mock_open
from ansible.errors import AnsibleParserError, yaml_strings, AnsibleFileNotFound
from ansible.parsing.vault import AnsibleVaultError
-from ansible.module_utils._text import to_text
-from ansible.module_utils.six import PY3
+from ansible.module_utils.common.text.converters import to_text
from units.mock.vault_helper import TextVaultSecret
from ansible.parsing.dataloader import DataLoader
@@ -92,11 +91,11 @@ class TestDataLoader(unittest.TestCase):
- { role: 'testrole' }
testrole/tasks/main.yml:
- - include: "include1.yml"
+ - include_tasks: "include1.yml"
static: no
testrole/tasks/include1.yml:
- - include: include2.yml
+ - include_tasks: include2.yml
static: no
testrole/tasks/include2.yml:
@@ -229,11 +228,7 @@ class TestDataLoaderWithVault(unittest.TestCase):
3135306561356164310a343937653834643433343734653137383339323330626437313562306630
3035
"""
- if PY3:
- builtins_name = 'builtins'
- else:
- builtins_name = '__builtin__'
- with patch(builtins_name + '.open', mock_open(read_data=vaulted_data.encode('utf-8'))):
+ with patch('builtins.open', mock_open(read_data=vaulted_data.encode('utf-8'))):
output = self._loader.load_from_file('dummy_vault.txt')
self.assertEqual(output, dict(foo='bar'))
diff --git a/test/units/parsing/test_mod_args.py b/test/units/parsing/test_mod_args.py
index 5d3f5d2..aeb74ad 100644
--- a/test/units/parsing/test_mod_args.py
+++ b/test/units/parsing/test_mod_args.py
@@ -6,10 +6,10 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
-import re
from ansible.errors import AnsibleParserError
from ansible.parsing.mod_args import ModuleArgsParser
+from ansible.plugins.loader import init_plugin_loader
from ansible.utils.sentinel import Sentinel
@@ -119,19 +119,19 @@ class TestModArgsDwim:
assert err.value.args[0] == msg
def test_multiple_actions_ping_shell(self):
+ init_plugin_loader()
args_dict = {'ping': 'data=hi', 'shell': 'echo hi'}
m = ModuleArgsParser(args_dict)
with pytest.raises(AnsibleParserError) as err:
m.parse()
- assert err.value.args[0].startswith("conflicting action statements: ")
- actions = set(re.search(r'(\w+), (\w+)', err.value.args[0]).groups())
- assert actions == set(['ping', 'shell'])
+ assert err.value.args[0] == f'conflicting action statements: {", ".join(args_dict)}'
def test_bogus_action(self):
+ init_plugin_loader()
args_dict = {'bogusaction': {}}
m = ModuleArgsParser(args_dict)
with pytest.raises(AnsibleParserError) as err:
m.parse()
- assert err.value.args[0].startswith("couldn't resolve module/action 'bogusaction'")
+ assert err.value.args[0].startswith(f"couldn't resolve module/action '{next(iter(args_dict))}'")
diff --git a/test/units/parsing/test_splitter.py b/test/units/parsing/test_splitter.py
index a37de0f..893f047 100644
--- a/test/units/parsing/test_splitter.py
+++ b/test/units/parsing/test_splitter.py
@@ -21,10 +21,17 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.parsing.splitter import split_args, parse_kv
+from ansible.errors import AnsibleParserError
import pytest
SPLIT_DATA = (
+ (None,
+ [],
+ {}),
+ (u'',
+ [],
+ {}),
(u'a',
[u'a'],
{u'_raw_params': u'a'}),
@@ -46,6 +53,18 @@ SPLIT_DATA = (
(u'a="echo \\"hello world\\"" b=bar',
[u'a="echo \\"hello world\\""', u'b=bar'],
{u'a': u'echo "hello world"', u'b': u'bar'}),
+ (u'a="nest\'ed"',
+ [u'a="nest\'ed"'],
+ {u'a': u'nest\'ed'}),
+ (u' ',
+ [u' '],
+ {u'_raw_params': u' '}),
+ (u'\\ ',
+ [u' '],
+ {u'_raw_params': u' '}),
+ (u'a\\=escaped',
+ [u'a\\=escaped'],
+ {u'_raw_params': u'a=escaped'}),
(u'a="multi\nline"',
[u'a="multi\nline"'],
{u'a': u'multi\nline'}),
@@ -61,12 +80,27 @@ SPLIT_DATA = (
(u'a="multiline\nmessage1\\\n" b="multiline\nmessage2\\\n"',
[u'a="multiline\nmessage1\\\n"', u'b="multiline\nmessage2\\\n"'],
{u'a': 'multiline\nmessage1\\\n', u'b': u'multiline\nmessage2\\\n'}),
+ (u'line \\\ncontinuation',
+ [u'line', u'continuation'],
+ {u'_raw_params': u'line continuation'}),
+ (u'not jinja}}',
+ [u'not', u'jinja}}'],
+ {u'_raw_params': u'not jinja}}'}),
+ (u'a={{multiline\njinja}}',
+ [u'a={{multiline\njinja}}'],
+ {u'a': u'{{multiline\njinja}}'}),
(u'a={{jinja}}',
[u'a={{jinja}}'],
{u'a': u'{{jinja}}'}),
(u'a={{ jinja }}',
[u'a={{ jinja }}'],
{u'a': u'{{ jinja }}'}),
+ (u'a={% jinja %}',
+ [u'a={% jinja %}'],
+ {u'a': u'{% jinja %}'}),
+ (u'a={# jinja #}',
+ [u'a={# jinja #}'],
+ {u'a': u'{# jinja #}'}),
(u'a="{{jinja}}"',
[u'a="{{jinja}}"'],
{u'a': u'{{jinja}}'}),
@@ -94,17 +128,50 @@ SPLIT_DATA = (
(u'One\n Two\n Three\n',
[u'One\n ', u'Two\n ', u'Three\n'],
{u'_raw_params': u'One\n Two\n Three\n'}),
+ (u'\nOne\n Two\n Three\n',
+ [u'\n', u'One\n ', u'Two\n ', u'Three\n'],
+ {u'_raw_params': u'\nOne\n Two\n Three\n'}),
)
-SPLIT_ARGS = ((test[0], test[1]) for test in SPLIT_DATA)
-PARSE_KV = ((test[0], test[2]) for test in SPLIT_DATA)
+PARSE_KV_CHECK_RAW = (
+ (u'raw=yes', {u'_raw_params': u'raw=yes'}),
+ (u'creates=something', {u'creates': u'something'}),
+)
+
+PARSER_ERROR = (
+ '"',
+ "'",
+ '{{',
+ '{%',
+ '{#',
+)
+SPLIT_ARGS = tuple((test[0], test[1]) for test in SPLIT_DATA)
+PARSE_KV = tuple((test[0], test[2]) for test in SPLIT_DATA)
-@pytest.mark.parametrize("args, expected", SPLIT_ARGS)
+
+@pytest.mark.parametrize("args, expected", SPLIT_ARGS, ids=[str(arg[0]) for arg in SPLIT_ARGS])
def test_split_args(args, expected):
assert split_args(args) == expected
-@pytest.mark.parametrize("args, expected", PARSE_KV)
+@pytest.mark.parametrize("args, expected", PARSE_KV, ids=[str(arg[0]) for arg in PARSE_KV])
def test_parse_kv(args, expected):
assert parse_kv(args) == expected
+
+
+@pytest.mark.parametrize("args, expected", PARSE_KV_CHECK_RAW, ids=[str(arg[0]) for arg in PARSE_KV_CHECK_RAW])
+def test_parse_kv_check_raw(args, expected):
+ assert parse_kv(args, check_raw=True) == expected
+
+
+@pytest.mark.parametrize("args", PARSER_ERROR)
+def test_split_args_error(args):
+ with pytest.raises(AnsibleParserError):
+ split_args(args)
+
+
+@pytest.mark.parametrize("args", PARSER_ERROR)
+def test_parse_kv_error(args):
+ with pytest.raises(AnsibleParserError):
+ parse_kv(args)
diff --git a/test/units/parsing/vault/test_vault.py b/test/units/parsing/vault/test_vault.py
index 7afd356..f94171a 100644
--- a/test/units/parsing/vault/test_vault.py
+++ b/test/units/parsing/vault/test_vault.py
@@ -21,7 +21,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import binascii
import io
import os
import tempfile
@@ -34,7 +33,7 @@ from unittest.mock import patch, MagicMock
from ansible import errors
from ansible.module_utils import six
-from ansible.module_utils._text import to_bytes, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_text
from ansible.parsing import vault
from units.mock.loader import DictDataLoader
@@ -606,9 +605,6 @@ class TestVaultLib(unittest.TestCase):
('test_id', text_secret)]
self.v = vault.VaultLib(self.vault_secrets)
- def _vault_secrets(self, vault_id, secret):
- return [(vault_id, secret)]
-
def _vault_secrets_from_password(self, vault_id, password):
return [(vault_id, TextVaultSecret(password))]
@@ -779,43 +775,6 @@ class TestVaultLib(unittest.TestCase):
b_plaintext = self.v.decrypt(b_vaulttext)
self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")
- # FIXME This test isn't working quite yet.
- @pytest.mark.skip(reason='This test is not ready yet')
- def test_encrypt_decrypt_aes256_bad_hmac(self):
-
- self.v.cipher_name = 'AES256'
- # plaintext = "Setec Astronomy"
- enc_data = '''$ANSIBLE_VAULT;1.1;AES256
-33363965326261303234626463623963633531343539616138316433353830356566396130353436
-3562643163366231316662386565383735653432386435610a306664636137376132643732393835
-63383038383730306639353234326630666539346233376330303938323639306661313032396437
-6233623062366136310a633866373936313238333730653739323461656662303864663666653563
-3138'''
- b_data = to_bytes(enc_data, errors='strict', encoding='utf-8')
- b_data = self.v._split_header(b_data)
- foo = binascii.unhexlify(b_data)
- lines = foo.splitlines()
- # line 0 is salt, line 1 is hmac, line 2+ is ciphertext
- b_salt = lines[0]
- b_hmac = lines[1]
- b_ciphertext_data = b'\n'.join(lines[2:])
-
- b_ciphertext = binascii.unhexlify(b_ciphertext_data)
- # b_orig_ciphertext = b_ciphertext[:]
-
- # now muck with the text
- # b_munged_ciphertext = b_ciphertext[:10] + b'\x00' + b_ciphertext[11:]
- # b_munged_ciphertext = b_ciphertext
- # assert b_orig_ciphertext != b_munged_ciphertext
-
- b_ciphertext_data = binascii.hexlify(b_ciphertext)
- b_payload = b'\n'.join([b_salt, b_hmac, b_ciphertext_data])
- # reformat
- b_invalid_ciphertext = self.v._format_output(b_payload)
-
- # assert we throw an error
- self.v.decrypt(b_invalid_ciphertext)
-
def test_decrypt_and_get_vault_id(self):
b_expected_plaintext = to_bytes('foo bar\n')
vaulttext = '''$ANSIBLE_VAULT;1.2;AES256;ansible_devel
diff --git a/test/units/parsing/vault/test_vault_editor.py b/test/units/parsing/vault/test_vault_editor.py
index 77509f0..28561c6 100644
--- a/test/units/parsing/vault/test_vault_editor.py
+++ b/test/units/parsing/vault/test_vault_editor.py
@@ -33,8 +33,7 @@ from ansible import errors
from ansible.parsing import vault
from ansible.parsing.vault import VaultLib, VaultEditor, match_encrypt_secret
-from ansible.module_utils.six import PY3
-from ansible.module_utils._text import to_bytes, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_text
from units.mock.vault_helper import TextVaultSecret
@@ -88,12 +87,10 @@ class TestVaultEditor(unittest.TestCase):
suffix = '_ansible_unit_test_%s_' % (self.__class__.__name__)
return tempfile.mkdtemp(suffix=suffix)
- def _create_file(self, test_dir, name, content=None, symlink=False):
+ def _create_file(self, test_dir, name, content, symlink=False):
file_path = os.path.join(test_dir, name)
- opened_file = open(file_path, 'wb')
- if content:
+ with open(file_path, 'wb') as opened_file:
opened_file.write(content)
- opened_file.close()
return file_path
def _vault_editor(self, vault_secrets=None):
@@ -118,11 +115,8 @@ class TestVaultEditor(unittest.TestCase):
def test_stdin_binary(self):
stdin_data = '\0'
- if PY3:
- fake_stream = StringIO(stdin_data)
- fake_stream.buffer = BytesIO(to_bytes(stdin_data))
- else:
- fake_stream = BytesIO(to_bytes(stdin_data))
+ fake_stream = StringIO(stdin_data)
+ fake_stream.buffer = BytesIO(to_bytes(stdin_data))
with patch('sys.stdin', fake_stream):
ve = self._vault_editor()
@@ -167,17 +161,15 @@ class TestVaultEditor(unittest.TestCase):
self.assertNotEqual(src_file_contents, b_ciphertext,
'b_ciphertext should be encrypted and not equal to src_contents')
- def _faux_editor(self, editor_args, new_src_contents=None):
+ def _faux_editor(self, editor_args, new_src_contents):
if editor_args[0] == 'shred':
return
tmp_path = editor_args[-1]
        # simulate the tmp file being edited
- tmp_file = open(tmp_path, 'wb')
- if new_src_contents:
+ with open(tmp_path, 'wb') as tmp_file:
tmp_file.write(new_src_contents)
- tmp_file.close()
def _faux_command(self, tmp_path):
pass
@@ -198,13 +190,13 @@ class TestVaultEditor(unittest.TestCase):
ve._edit_file_helper(src_file_path, self.vault_secret, existing_data=src_file_contents)
- new_target_file = open(src_file_path, 'rb')
- new_target_file_contents = new_target_file.read()
- self.assertEqual(src_file_contents, new_target_file_contents)
+ with open(src_file_path, 'rb') as new_target_file:
+ new_target_file_contents = new_target_file.read()
+ self.assertEqual(src_file_contents, new_target_file_contents)
def _assert_file_is_encrypted(self, vault_editor, src_file_path, src_contents):
- new_src_file = open(src_file_path, 'rb')
- new_src_file_contents = new_src_file.read()
+ with open(src_file_path, 'rb') as new_src_file:
+ new_src_file_contents = new_src_file.read()
# TODO: assert that it is encrypted
self.assertTrue(vault.is_encrypted(new_src_file_contents))
@@ -339,8 +331,8 @@ class TestVaultEditor(unittest.TestCase):
ve.encrypt_file(src_file_path, self.vault_secret)
ve.edit_file(src_file_path)
- new_src_file = open(src_file_path, 'rb')
- new_src_file_contents = new_src_file.read()
+ with open(src_file_path, 'rb') as new_src_file:
+ new_src_file_contents = new_src_file.read()
self.assertTrue(b'$ANSIBLE_VAULT;1.1;AES256' in new_src_file_contents)
@@ -367,8 +359,8 @@ class TestVaultEditor(unittest.TestCase):
vault_id='vault_secrets')
ve.edit_file(src_file_path)
- new_src_file = open(src_file_path, 'rb')
- new_src_file_contents = new_src_file.read()
+ with open(src_file_path, 'rb') as new_src_file:
+ new_src_file_contents = new_src_file.read()
self.assertTrue(b'$ANSIBLE_VAULT;1.2;AES256;vault_secrets' in new_src_file_contents)
@@ -399,8 +391,8 @@ class TestVaultEditor(unittest.TestCase):
ve.edit_file(src_file_link_path)
- new_src_file = open(src_file_path, 'rb')
- new_src_file_contents = new_src_file.read()
+ with open(src_file_path, 'rb') as new_src_file:
+ new_src_file_contents = new_src_file.read()
src_file_plaintext = ve.vault.decrypt(new_src_file_contents)
@@ -418,13 +410,6 @@ class TestVaultEditor(unittest.TestCase):
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
- new_src_contents = to_bytes("The info is different now.")
-
- def faux_editor(editor_args):
- self._faux_editor(editor_args, new_src_contents)
-
- mock_sp_call.side_effect = faux_editor
-
ve = self._vault_editor()
self.assertRaisesRegex(errors.AnsibleError,
'input is not vault encrypted data',
@@ -478,20 +463,14 @@ class TestVaultEditor(unittest.TestCase):
ve = self._vault_editor(self._secrets("ansible"))
# make sure the password functions for the cipher
- error_hit = False
- try:
- ve.decrypt_file(v11_file.name)
- except errors.AnsibleError:
- error_hit = True
+ ve.decrypt_file(v11_file.name)
# verify decrypted content
- f = open(v11_file.name, "rb")
- fdata = to_text(f.read())
- f.close()
+ with open(v11_file.name, "rb") as f:
+ fdata = to_text(f.read())
os.unlink(v11_file.name)
- assert error_hit is False, "error decrypting 1.1 file"
assert fdata.strip() == "foo", "incorrect decryption of 1.1 file: %s" % fdata.strip()
def test_real_path_dash(self):
@@ -501,21 +480,9 @@ class TestVaultEditor(unittest.TestCase):
res = ve._real_path(filename)
self.assertEqual(res, '-')
- def test_real_path_dev_null(self):
+ def test_real_path_not_dash(self):
filename = '/dev/null'
ve = self._vault_editor()
res = ve._real_path(filename)
- self.assertEqual(res, '/dev/null')
-
- def test_real_path_symlink(self):
- self._test_dir = os.path.realpath(self._create_test_dir())
- file_path = self._create_file(self._test_dir, 'test_file', content=b'this is a test file')
- file_link_path = os.path.join(self._test_dir, 'a_link_to_test_file')
-
- os.symlink(file_path, file_link_path)
-
- ve = self._vault_editor()
-
- res = ve._real_path(file_link_path)
- self.assertEqual(res, file_path)
+ self.assertNotEqual(res, '-')
diff --git a/test/units/parsing/yaml/test_dumper.py b/test/units/parsing/yaml/test_dumper.py
index cbf5b45..8af1eee 100644
--- a/test/units/parsing/yaml/test_dumper.py
+++ b/test/units/parsing/yaml/test_dumper.py
@@ -19,7 +19,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import io
-import yaml
from jinja2.exceptions import UndefinedError
@@ -27,7 +26,6 @@ from units.compat import unittest
from ansible.parsing import vault
from ansible.parsing.yaml import dumper, objects
from ansible.parsing.yaml.loader import AnsibleLoader
-from ansible.module_utils.six import PY2
from ansible.template import AnsibleUndefined
from units.mock.yaml_helper import YamlTestUtils
@@ -76,20 +74,6 @@ class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
data_from_yaml = loader.get_single_data()
result = b_text
- if PY2:
- # https://pyyaml.org/wiki/PyYAMLDocumentation#string-conversion-python-2-only
- # pyyaml on Python 2 can return either unicode or bytes when given byte strings.
- # We normalize that to always return unicode on Python2 as that's right most of the
- # time. However, this means byte strings can round trip through yaml on Python3 but
- # not on Python2. To make this code work the same on Python2 and Python3 (we want
- # the Python3 behaviour) we need to change the methods in Ansible to:
- # (1) Let byte strings pass through yaml without being converted on Python2
- # (2) Convert byte strings to text strings before being given to pyyaml (Without this,
- # strings would end up as byte strings most of the time which would mostly be wrong)
- # In practice, we mostly read bytes in from files and then pass that to pyyaml, for which
- # the present behavior is correct.
- # This is a workaround for the current behavior.
- result = u'tr\xe9ma'
self.assertEqual(result, data_from_yaml)
@@ -105,10 +89,7 @@ class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
self.assertEqual(u_text, data_from_yaml)
def test_vars_with_sources(self):
- try:
- self._dump_string(VarsWithSources(), dumper=self.dumper)
- except yaml.representer.RepresenterError:
- self.fail("Dump VarsWithSources raised RepresenterError unexpectedly!")
+ self._dump_string(VarsWithSources(), dumper=self.dumper)
def test_undefined(self):
undefined_object = AnsibleUndefined()
diff --git a/test/units/parsing/yaml/test_objects.py b/test/units/parsing/yaml/test_objects.py
index f64b708..f899915 100644
--- a/test/units/parsing/yaml/test_objects.py
+++ b/test/units/parsing/yaml/test_objects.py
@@ -24,7 +24,7 @@ from units.compat import unittest
from ansible.errors import AnsibleError
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.parsing import vault
from ansible.parsing.yaml.loader import AnsibleLoader
@@ -105,11 +105,6 @@ class TestAnsibleVaultEncryptedUnicode(unittest.TestCase, YamlTestUtils):
id_secret = vault.match_encrypt_secret(self.good_vault_secrets)
return objects.AnsibleVaultEncryptedUnicode.from_plaintext(seq, vault=self.vault, secret=id_secret[1])
- def _from_ciphertext(self, ciphertext):
- avu = objects.AnsibleVaultEncryptedUnicode(ciphertext)
- avu.vault = self.vault
- return avu
-
def test_empty_init(self):
self.assertRaises(TypeError, objects.AnsibleVaultEncryptedUnicode)
diff --git a/test/units/playbook/role/test_include_role.py b/test/units/playbook/role/test_include_role.py
index 5e7625b..aa97da1 100644
--- a/test/units/playbook/role/test_include_role.py
+++ b/test/units/playbook/role/test_include_role.py
@@ -108,8 +108,6 @@ class TestIncludeRole(unittest.TestCase):
# skip meta: role_complete
continue
role = task._role
- if not role:
- continue
yield (role.get_name(),
self.var_manager.get_vars(play=play, task=task))
@@ -201,7 +199,7 @@ class TestIncludeRole(unittest.TestCase):
self.assertEqual(task_vars.get('l3_variable'), 'l3-main')
self.assertEqual(task_vars.get('test_variable'), 'l3-main')
else:
- self.fail()
+ self.fail() # pragma: nocover
self.assertFalse(expected_roles)
@patch('ansible.playbook.role.definition.unfrackpath',
@@ -247,5 +245,5 @@ class TestIncludeRole(unittest.TestCase):
self.assertEqual(task_vars.get('l3_variable'), 'l3-alt')
self.assertEqual(task_vars.get('test_variable'), 'l3-alt')
else:
- self.fail()
+ self.fail() # pragma: nocover
self.assertFalse(expected_roles)
diff --git a/test/units/playbook/role/test_role.py b/test/units/playbook/role/test_role.py
index 5d47631..9d6b0ed 100644
--- a/test/units/playbook/role/test_role.py
+++ b/test/units/playbook/role/test_role.py
@@ -21,10 +21,12 @@ __metaclass__ = type
from collections.abc import Container
+import pytest
+
from units.compat import unittest
from unittest.mock import patch, MagicMock
-from ansible.errors import AnsibleError, AnsibleParserError
+from ansible.errors import AnsibleParserError
from ansible.playbook.block import Block
from units.mock.loader import DictDataLoader
@@ -42,12 +44,9 @@ class TestHashParams(unittest.TestCase):
self._assert_set(res)
self._assert_hashable(res)
- def _assert_hashable(self, res):
- a_dict = {}
- try:
- a_dict[res] = res
- except TypeError as e:
- self.fail('%s is not hashable: %s' % (res, e))
+ @staticmethod
+ def _assert_hashable(res):
+ hash(res)
def _assert_set(self, res):
self.assertIsInstance(res, frozenset)
@@ -87,36 +86,28 @@ class TestHashParams(unittest.TestCase):
def test_generator(self):
def my_generator():
- for i in ['a', 1, None, {}]:
- yield i
+ yield
params = my_generator()
res = hash_params(params)
self._assert_hashable(res)
+ assert list(params)
def test_container_but_not_iterable(self):
# This is a Container that is not iterable, which is unlikely but...
class MyContainer(Container):
- def __init__(self, some_thing):
- self.data = []
- self.data.append(some_thing)
+ def __init__(self, _some_thing):
+ pass
def __contains__(self, item):
- return item in self.data
-
- def __hash__(self):
- return hash(self.data)
-
- def __len__(self):
- return len(self.data)
+ """Implementation omitted, since it will never be called."""
- def __call__(self):
- return False
+ params = MyContainer('foo bar')
- foo = MyContainer('foo bar')
- params = foo
+ with pytest.raises(TypeError) as ex:
+ hash_params(params)
- self.assertRaises(TypeError, hash_params, params)
+ assert ex.value.args == ("'MyContainer' object is not iterable",)
def test_param_dict_dupe_values(self):
params1 = {'foo': False}
@@ -151,18 +142,18 @@ class TestHashParams(unittest.TestCase):
self.assertNotEqual(hash(res1), hash(res2))
self.assertNotEqual(res1, res2)
- foo = {}
- foo[res1] = 'params1'
- foo[res2] = 'params2'
+ params_dict = {}
+ params_dict[res1] = 'params1'
+ params_dict[res2] = 'params2'
- self.assertEqual(len(foo), 2)
+ self.assertEqual(len(params_dict), 2)
- del foo[res2]
- self.assertEqual(len(foo), 1)
+ del params_dict[res2]
+ self.assertEqual(len(params_dict), 1)
- for key in foo:
- self.assertTrue(key in foo)
- self.assertIn(key, foo)
+ for key in params_dict:
+ self.assertTrue(key in params_dict)
+ self.assertIn(key, params_dict)
class TestRole(unittest.TestCase):
@@ -177,7 +168,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.ROLE_CACHE = {}
+ mock_play.role_cache = {}
i = RoleInclude.load('foo_tasks', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -199,7 +190,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.ROLE_CACHE = {}
+ mock_play.role_cache = {}
i = RoleInclude.load('foo_tasks', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play, from_files=dict(tasks='custom_main'))
@@ -217,7 +208,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.ROLE_CACHE = {}
+ mock_play.role_cache = {}
i = RoleInclude.load('foo_handlers', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -238,7 +229,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.ROLE_CACHE = {}
+ mock_play.role_cache = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -259,7 +250,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.ROLE_CACHE = {}
+ mock_play.role_cache = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -280,7 +271,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.ROLE_CACHE = {}
+ mock_play.role_cache = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -303,7 +294,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.ROLE_CACHE = {}
+ mock_play.role_cache = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -323,7 +314,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.ROLE_CACHE = {}
+ mock_play.role_cache = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -370,7 +361,7 @@ class TestRole(unittest.TestCase):
mock_play = MagicMock()
mock_play.collections = None
- mock_play.ROLE_CACHE = {}
+ mock_play.role_cache = {}
i = RoleInclude.load('foo_metadata', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -415,7 +406,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.ROLE_CACHE = {}
+ mock_play.role_cache = {}
i = RoleInclude.load(dict(role='foo_complex'), play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
diff --git a/test/units/playbook/test_base.py b/test/units/playbook/test_base.py
index d5810e7..bedd96a 100644
--- a/test/units/playbook/test_base.py
+++ b/test/units/playbook/test_base.py
@@ -21,13 +21,12 @@ __metaclass__ = type
from units.compat import unittest
-from ansible.errors import AnsibleParserError
+from ansible.errors import AnsibleParserError, AnsibleAssertionError
from ansible.module_utils.six import string_types
from ansible.playbook.attribute import FieldAttribute, NonInheritableFieldAttribute
from ansible.template import Templar
from ansible.playbook import base
-from ansible.utils.unsafe_proxy import AnsibleUnsafeBytes, AnsibleUnsafeText
-from ansible.utils.sentinel import Sentinel
+from ansible.utils.unsafe_proxy import AnsibleUnsafeText
from units.mock.loader import DictDataLoader
@@ -331,12 +330,6 @@ class ExampleSubClass(base.Base):
def __init__(self):
super(ExampleSubClass, self).__init__()
- def get_dep_chain(self):
- if self._parent:
- return self._parent.get_dep_chain()
- else:
- return None
-
class BaseSubClass(base.Base):
name = FieldAttribute(isa='string', default='', always_post_validate=True)
@@ -588,10 +581,11 @@ class TestBaseSubClass(TestBase):
bsc.post_validate, templar)
def test_attr_unknown(self):
- a_list = ['some string']
- ds = {'test_attr_unknown_isa': a_list}
- bsc = self._base_validate(ds)
- self.assertEqual(bsc.test_attr_unknown_isa, a_list)
+ self.assertRaises(
+ AnsibleAssertionError,
+ self._base_validate,
+ {'test_attr_unknown_isa': True}
+ )
def test_attr_method(self):
ds = {'test_attr_method': 'value from the ds'}
diff --git a/test/units/playbook/test_collectionsearch.py b/test/units/playbook/test_collectionsearch.py
index be40d85..d16541b 100644
--- a/test/units/playbook/test_collectionsearch.py
+++ b/test/units/playbook/test_collectionsearch.py
@@ -22,7 +22,6 @@ from ansible.errors import AnsibleParserError
from ansible.playbook.play import Play
from ansible.playbook.task import Task
from ansible.playbook.block import Block
-from ansible.playbook.collectionsearch import CollectionSearch
import pytest
diff --git a/test/units/playbook/test_helpers.py b/test/units/playbook/test_helpers.py
index a89730c..23385c0 100644
--- a/test/units/playbook/test_helpers.py
+++ b/test/units/playbook/test_helpers.py
@@ -52,10 +52,6 @@ class MixinForMocks(object):
self.mock_inventory = MagicMock(name='MockInventory')
self.mock_inventory._hosts_cache = dict()
- def _get_host(host_name):
- return None
-
- self.mock_inventory.get_host.side_effect = _get_host
# TODO: can we use a real VariableManager?
self.mock_variable_manager = MagicMock(name='MockVariableManager')
self.mock_variable_manager.get_vars.return_value = dict()
@@ -69,11 +65,11 @@ class MixinForMocks(object):
self._test_data_path = os.path.dirname(__file__)
self.fake_include_loader = DictDataLoader({"/dev/null/includes/test_include.yml": """
- - include: other_test_include.yml
+ - include_tasks: other_test_include.yml
- shell: echo 'hello world'
""",
"/dev/null/includes/static_test_include.yml": """
- - include: other_test_include.yml
+ - include_tasks: other_test_include.yml
- shell: echo 'hello static world'
""",
"/dev/null/includes/other_test_include.yml": """
@@ -86,10 +82,6 @@ class TestLoadListOfTasks(unittest.TestCase, MixinForMocks):
def setUp(self):
self._setup()
- def _assert_is_task_list(self, results):
- for result in results:
- self.assertIsInstance(result, Task)
-
def _assert_is_task_list_or_blocks(self, results):
self.assertIsInstance(results, list)
for result in results:
@@ -168,57 +160,57 @@ class TestLoadListOfTasks(unittest.TestCase, MixinForMocks):
ds, play=self.mock_play, use_handlers=True,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
- def test_one_bogus_include(self):
- ds = [{'include': 'somefile.yml'}]
+ def test_one_bogus_include_tasks(self):
+ ds = [{'include_tasks': 'somefile.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
self.assertIsInstance(res, list)
- self.assertEqual(len(res), 0)
+ self.assertEqual(len(res), 1)
+ self.assertIsInstance(res[0], TaskInclude)
- def test_one_bogus_include_use_handlers(self):
- ds = [{'include': 'somefile.yml'}]
+ def test_one_bogus_include_tasks_use_handlers(self):
+ ds = [{'include_tasks': 'somefile.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play, use_handlers=True,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
self.assertIsInstance(res, list)
- self.assertEqual(len(res), 0)
+ self.assertEqual(len(res), 1)
+ self.assertIsInstance(res[0], TaskInclude)
- def test_one_bogus_include_static(self):
+ def test_one_bogus_import_tasks(self):
ds = [{'import_tasks': 'somefile.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
self.assertIsInstance(res, list)
self.assertEqual(len(res), 0)
- def test_one_include(self):
- ds = [{'include': '/dev/null/includes/other_test_include.yml'}]
+ def test_one_include_tasks(self):
+ ds = [{'include_tasks': '/dev/null/includes/other_test_include.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self.assertEqual(len(res), 1)
self._assert_is_task_list_or_blocks(res)
- def test_one_parent_include(self):
- ds = [{'include': '/dev/null/includes/test_include.yml'}]
+ def test_one_parent_include_tasks(self):
+ ds = [{'include_tasks': '/dev/null/includes/test_include.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self._assert_is_task_list_or_blocks(res)
- self.assertIsInstance(res[0], Block)
- self.assertIsInstance(res[0]._parent, TaskInclude)
+ self.assertIsInstance(res[0], TaskInclude)
+ self.assertIsNone(res[0]._parent)
- # TODO/FIXME: do this non deprecated way
- def test_one_include_tags(self):
- ds = [{'include': '/dev/null/includes/other_test_include.yml',
+ def test_one_include_tasks_tags(self):
+ ds = [{'include_tasks': '/dev/null/includes/other_test_include.yml',
'tags': ['test_one_include_tags_tag1', 'and_another_tagB']
}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self._assert_is_task_list_or_blocks(res)
- self.assertIsInstance(res[0], Block)
+ self.assertIsInstance(res[0], TaskInclude)
self.assertIn('test_one_include_tags_tag1', res[0].tags)
self.assertIn('and_another_tagB', res[0].tags)
- # TODO/FIXME: do this non deprecated way
- def test_one_parent_include_tags(self):
- ds = [{'include': '/dev/null/includes/test_include.yml',
+ def test_one_parent_include_tasks_tags(self):
+ ds = [{'include_tasks': '/dev/null/includes/test_include.yml',
# 'vars': {'tags': ['test_one_parent_include_tags_tag1', 'and_another_tag2']}
'tags': ['test_one_parent_include_tags_tag1', 'and_another_tag2']
}
@@ -226,20 +218,20 @@ class TestLoadListOfTasks(unittest.TestCase, MixinForMocks):
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self._assert_is_task_list_or_blocks(res)
- self.assertIsInstance(res[0], Block)
+ self.assertIsInstance(res[0], TaskInclude)
self.assertIn('test_one_parent_include_tags_tag1', res[0].tags)
self.assertIn('and_another_tag2', res[0].tags)
- def test_one_include_use_handlers(self):
- ds = [{'include': '/dev/null/includes/other_test_include.yml'}]
+ def test_one_include_tasks_use_handlers(self):
+ ds = [{'include_tasks': '/dev/null/includes/other_test_include.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
use_handlers=True,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self._assert_is_task_list_or_blocks(res)
self.assertIsInstance(res[0], Handler)
- def test_one_parent_include_use_handlers(self):
- ds = [{'include': '/dev/null/includes/test_include.yml'}]
+ def test_one_parent_include_tasks_use_handlers(self):
+ ds = [{'include_tasks': '/dev/null/includes/test_include.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
use_handlers=True,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
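For reference, these helpers tests now separate dynamic include_tasks, which the tests expect to survive loading as a single TaskInclude entry, from static import_tasks, which are expanded at parse time (so a bogus file contributes nothing). A rough standalone sketch of that behavioural difference, with a hypothetical function standing in for helpers.load_list_of_tasks:

def fake_load_list_of_tasks(ds):
    """Hypothetical stand-in: dynamic includes are deferred, static imports expand immediately."""
    results = []
    for entry in ds:
        if 'include_tasks' in entry:
            # deferred until runtime, so it stays in the task list as one entry
            results.append(('TaskInclude', entry['include_tasks']))
        elif 'import_tasks' in entry:
            # a real import would read the file now; a bogus path yields nothing
            pass
    return results

assert fake_load_list_of_tasks([{'include_tasks': 'somefile.yml'}]) == [('TaskInclude', 'somefile.yml')]
assert fake_load_list_of_tasks([{'import_tasks': 'somefile.yml'}]) == []
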
diff --git a/test/units/playbook/test_included_file.py b/test/units/playbook/test_included_file.py
index 7341dff..c7a66b0 100644
--- a/test/units/playbook/test_included_file.py
+++ b/test/units/playbook/test_included_file.py
@@ -105,7 +105,7 @@ def test_included_file_instantiation():
assert inc_file._task is None
-def test_process_include_results(mock_iterator, mock_variable_manager):
+def test_process_include_tasks_results(mock_iterator, mock_variable_manager):
hostname = "testhost1"
hostname2 = "testhost2"
@@ -113,7 +113,7 @@ def test_process_include_results(mock_iterator, mock_variable_manager):
parent_task = Task.load(parent_task_ds)
parent_task._play = None
- task_ds = {'include': 'include_test.yml'}
+ task_ds = {'include_tasks': 'include_test.yml'}
loaded_task = TaskInclude.load(task_ds, task_include=parent_task)
return_data = {'include': 'include_test.yml'}
@@ -133,7 +133,7 @@ def test_process_include_results(mock_iterator, mock_variable_manager):
assert res[0]._vars == {}
-def test_process_include_diff_files(mock_iterator, mock_variable_manager):
+def test_process_include_tasks_diff_files(mock_iterator, mock_variable_manager):
hostname = "testhost1"
hostname2 = "testhost2"
@@ -141,11 +141,11 @@ def test_process_include_diff_files(mock_iterator, mock_variable_manager):
parent_task = Task.load(parent_task_ds)
parent_task._play = None
- task_ds = {'include': 'include_test.yml'}
+ task_ds = {'include_tasks': 'include_test.yml'}
loaded_task = TaskInclude.load(task_ds, task_include=parent_task)
loaded_task._play = None
- child_task_ds = {'include': 'other_include_test.yml'}
+ child_task_ds = {'include_tasks': 'other_include_test.yml'}
loaded_child_task = TaskInclude.load(child_task_ds, task_include=loaded_task)
loaded_child_task._play = None
@@ -175,7 +175,7 @@ def test_process_include_diff_files(mock_iterator, mock_variable_manager):
assert res[1]._vars == {}
-def test_process_include_simulate_free(mock_iterator, mock_variable_manager):
+def test_process_include_tasks_simulate_free(mock_iterator, mock_variable_manager):
hostname = "testhost1"
hostname2 = "testhost2"
@@ -186,7 +186,7 @@ def test_process_include_simulate_free(mock_iterator, mock_variable_manager):
parent_task1._play = None
parent_task2._play = None
- task_ds = {'include': 'include_test.yml'}
+ task_ds = {'include_tasks': 'include_test.yml'}
loaded_task1 = TaskInclude.load(task_ds, task_include=parent_task1)
loaded_task2 = TaskInclude.load(task_ds, task_include=parent_task2)
diff --git a/test/units/playbook/test_play_context.py b/test/units/playbook/test_play_context.py
index 7c24de5..7461b45 100644
--- a/test/units/playbook/test_play_context.py
+++ b/test/units/playbook/test_play_context.py
@@ -12,10 +12,8 @@ import pytest
from ansible import constants as C
from ansible import context
from ansible.cli.arguments import option_helpers as opt_help
-from ansible.errors import AnsibleError
from ansible.playbook.play_context import PlayContext
from ansible.playbook.play import Play
-from ansible.plugins.loader import become_loader
from ansible.utils import context_objects as co
diff --git a/test/units/playbook/test_taggable.py b/test/units/playbook/test_taggable.py
index 3881e17..c6ce35d 100644
--- a/test/units/playbook/test_taggable.py
+++ b/test/units/playbook/test_taggable.py
@@ -29,6 +29,7 @@ class TaggableTestObj(Taggable):
def __init__(self):
self._loader = DictDataLoader({})
self.tags = []
+ self._parent = None
class TestTaggable(unittest.TestCase):
diff --git a/test/units/playbook/test_task.py b/test/units/playbook/test_task.py
index 070d7aa..e28d2ec 100644
--- a/test/units/playbook/test_task.py
+++ b/test/units/playbook/test_task.py
@@ -22,6 +22,7 @@ __metaclass__ = type
from units.compat import unittest
from unittest.mock import patch
from ansible.playbook.task import Task
+from ansible.plugins.loader import init_plugin_loader
from ansible.parsing.yaml import objects
from ansible import errors
@@ -74,6 +75,7 @@ class TestTask(unittest.TestCase):
@patch.object(errors.AnsibleError, '_get_error_lines_from_file')
def test_load_task_kv_form_error_36848(self, mock_get_err_lines):
+ init_plugin_loader()
ds = objects.AnsibleMapping(kv_bad_args_ds)
ds.ansible_pos = ('test_task_faux_playbook.yml', 1, 1)
mock_get_err_lines.return_value = (kv_bad_args_str, '')
diff --git a/test/units/plugins/action/test_action.py b/test/units/plugins/action/test_action.py
index f2bbe19..33d09c4 100644
--- a/test/units/plugins/action/test_action.py
+++ b/test/units/plugins/action/test_action.py
@@ -22,6 +22,7 @@ __metaclass__ = type
import os
import re
+from importlib import import_module
from ansible import constants as C
from units.compat import unittest
@@ -30,9 +31,10 @@ from unittest.mock import patch, MagicMock, mock_open
from ansible.errors import AnsibleError, AnsibleAuthenticationFailure
from ansible.module_utils.six import text_type
from ansible.module_utils.six.moves import shlex_quote, builtins
-from ansible.module_utils._text import to_bytes
+from ansible.module_utils.common.text.converters import to_bytes
from ansible.playbook.play_context import PlayContext
from ansible.plugins.action import ActionBase
+from ansible.plugins.loader import init_plugin_loader
from ansible.template import Templar
from ansible.vars.clean import clean_facts
@@ -109,6 +111,11 @@ class TestActionBase(unittest.TestCase):
self.assertEqual(results, {})
def test_action_base__configure_module(self):
+ init_plugin_loader()
+ # Pre-populate the ansible.builtin collection
+ # so reading the ansible_builtin_runtime.yml happens
+ # before the mock_open below
+ import_module('ansible_collections.ansible.builtin')
fake_loader = DictDataLoader({
})
@@ -262,11 +269,8 @@ class TestActionBase(unittest.TestCase):
def get_shell_opt(opt):
- ret = None
- if opt == 'admin_users':
- ret = ['root', 'toor', 'Administrator']
- elif opt == 'remote_tmp':
- ret = '~/.ansible/tmp'
+ assert opt == 'admin_users'
+ ret = ['root', 'toor', 'Administrator']
return ret
@@ -662,17 +666,10 @@ class TestActionBase(unittest.TestCase):
mock_task.no_log = False
# create a mock connection, so we don't actually try and connect to things
- def build_module_command(env_string, shebang, cmd, arg_path=None):
- to_run = [env_string, cmd]
- if arg_path:
- to_run.append(arg_path)
- return " ".join(to_run)
-
def get_option(option):
return {'admin_users': ['root', 'toor']}.get(option)
mock_connection = MagicMock()
- mock_connection.build_module_command.side_effect = build_module_command
mock_connection.socket_path = None
mock_connection._shell.get_remote_filename.return_value = 'copy.py'
mock_connection._shell.join_path.side_effect = os.path.join
@@ -799,41 +796,7 @@ class TestActionBase(unittest.TestCase):
class TestActionBaseCleanReturnedData(unittest.TestCase):
def test(self):
-
- fake_loader = DictDataLoader({
- })
- mock_module_loader = MagicMock()
- mock_shared_loader_obj = MagicMock()
- mock_shared_loader_obj.module_loader = mock_module_loader
- connection_loader_paths = ['/tmp/asdfadf', '/usr/lib64/whatever',
- 'dfadfasf',
- 'foo.py',
- '.*',
- # FIXME: a path with parans breaks the regex
- # '(.*)',
- '/path/to/ansible/lib/ansible/plugins/connection/custom_connection.py',
- '/path/to/ansible/lib/ansible/plugins/connection/ssh.py']
-
- def fake_all(path_only=None):
- for path in connection_loader_paths:
- yield path
-
- mock_connection_loader = MagicMock()
- mock_connection_loader.all = fake_all
-
- mock_shared_loader_obj.connection_loader = mock_connection_loader
- mock_connection = MagicMock()
- # mock_connection._shell.env_prefix.side_effect = env_prefix
-
- # action_base = DerivedActionBase(mock_task, mock_connection, play_context, None, None, None)
- action_base = DerivedActionBase(task=None,
- connection=mock_connection,
- play_context=None,
- loader=fake_loader,
- templar=None,
- shared_loader_obj=mock_shared_loader_obj)
data = {'ansible_playbook_python': '/usr/bin/python',
- # 'ansible_rsync_path': '/usr/bin/rsync',
'ansible_python_interpreter': '/usr/bin/python',
'ansible_ssh_some_var': 'whatever',
'ansible_ssh_host_key_somehost': 'some key here',
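For reference, several stubs above were tightened so they assert on the exact option being requested instead of silently returning None for anything unexpected. A small self-contained illustration of that pattern with unittest.mock (the names are hypothetical):

from unittest.mock import MagicMock

def get_shell_opt(opt):
    # failing loudly on an unexpected option documents exactly what the
    # code under test is allowed to ask for
    assert opt == 'admin_users'
    return ['root', 'toor', 'Administrator']

mock_shell = MagicMock()
mock_shell.get_option.side_effect = get_shell_opt

assert mock_shell.get_option('admin_users') == ['root', 'toor', 'Administrator']
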
diff --git a/test/units/plugins/action/test_raw.py b/test/units/plugins/action/test_raw.py
index 3348051..c50004a 100644
--- a/test/units/plugins/action/test_raw.py
+++ b/test/units/plugins/action/test_raw.py
@@ -20,7 +20,6 @@ __metaclass__ = type
import os
-from ansible.errors import AnsibleActionFail
from units.compat import unittest
from unittest.mock import MagicMock, Mock
from ansible.plugins.action.raw import ActionModule
@@ -68,10 +67,7 @@ class TestCopyResultExclude(unittest.TestCase):
task.args = {'_raw_params': 'Args1'}
self.play_context.check_mode = True
- try:
- self.mock_am = ActionModule(task, self.connection, self.play_context, loader=None, templar=None, shared_loader_obj=None)
- except AnsibleActionFail:
- pass
+ self.mock_am = ActionModule(task, self.connection, self.play_context, loader=None, templar=None, shared_loader_obj=None)
def test_raw_test_environment_is_None(self):
diff --git a/test/units/plugins/cache/test_cache.py b/test/units/plugins/cache/test_cache.py
index 25b84c0..b4ffe4e 100644
--- a/test/units/plugins/cache/test_cache.py
+++ b/test/units/plugins/cache/test_cache.py
@@ -29,7 +29,7 @@ from units.compat import unittest
from ansible.errors import AnsibleError
from ansible.plugins.cache import CachePluginAdjudicator
from ansible.plugins.cache.memory import CacheModule as MemoryCache
-from ansible.plugins.loader import cache_loader
+from ansible.plugins.loader import cache_loader, init_plugin_loader
from ansible.vars.fact_cache import FactCache
import pytest
@@ -66,7 +66,7 @@ class TestCachePluginAdjudicator(unittest.TestCase):
def test___getitem__(self):
with pytest.raises(KeyError):
- self.cache['foo']
+ self.cache['foo'] # pylint: disable=pointless-statement
def test_pop_with_default(self):
assert self.cache.pop('foo', 'bar') == 'bar'
@@ -183,6 +183,7 @@ class TestFactCache(unittest.TestCase):
assert len(self.cache.keys()) == 0
def test_plugin_load_failure(self):
+ init_plugin_loader()
# See https://github.com/ansible/ansible/issues/18751
# Note no fact_connection config set, so this will fail
with mock.patch('ansible.constants.CACHE_PLUGIN', 'json'):
diff --git a/test/units/plugins/connection/test_connection.py b/test/units/plugins/connection/test_connection.py
index 38d6691..56095c6 100644
--- a/test/units/plugins/connection/test_connection.py
+++ b/test/units/plugins/connection/test_connection.py
@@ -27,6 +27,28 @@ from ansible.plugins.connection import ConnectionBase
from ansible.plugins.loader import become_loader
+class NoOpConnection(ConnectionBase):
+
+ @property
+ def transport(self):
+ """This method is never called by unit tests."""
+
+ def _connect(self):
+ """This method is never called by unit tests."""
+
+ def exec_command(self):
+ """This method is never called by unit tests."""
+
+ def put_file(self):
+ """This method is never called by unit tests."""
+
+ def fetch_file(self):
+ """This method is never called by unit tests."""
+
+ def close(self):
+ """This method is never called by unit tests."""
+
+
class TestConnectionBaseClass(unittest.TestCase):
def setUp(self):
@@ -45,36 +67,8 @@ class TestConnectionBaseClass(unittest.TestCase):
with self.assertRaises(TypeError):
ConnectionModule1() # pylint: disable=abstract-class-instantiated
- class ConnectionModule2(ConnectionBase):
- def get(self, key):
- super(ConnectionModule2, self).get(key)
-
- with self.assertRaises(TypeError):
- ConnectionModule2() # pylint: disable=abstract-class-instantiated
-
def test_subclass_success(self):
- class ConnectionModule3(ConnectionBase):
-
- @property
- def transport(self):
- pass
-
- def _connect(self):
- pass
-
- def exec_command(self):
- pass
-
- def put_file(self):
- pass
-
- def fetch_file(self):
- pass
-
- def close(self):
- pass
-
- self.assertIsInstance(ConnectionModule3(self.play_context, self.in_stream), ConnectionModule3)
+ self.assertIsInstance(NoOpConnection(self.play_context, self.in_stream), NoOpConnection)
def test_check_password_prompt(self):
local = (
@@ -129,28 +123,7 @@ debug3: receive packet: type 98
debug1: Sending command: /bin/sh -c 'sudo -H -S -p "[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: " -u root /bin/sh -c '"'"'echo
'''
- class ConnectionFoo(ConnectionBase):
-
- @property
- def transport(self):
- pass
-
- def _connect(self):
- pass
-
- def exec_command(self):
- pass
-
- def put_file(self):
- pass
-
- def fetch_file(self):
- pass
-
- def close(self):
- pass
-
- c = ConnectionFoo(self.play_context, self.in_stream)
+ c = NoOpConnection(self.play_context, self.in_stream)
c.set_become_plugin(become_loader.get('sudo'))
c.become.prompt = '[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: '
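For reference, the repeated throwaway subclasses were consolidated into the single module-level NoOpConnection above. The general shape of that refactor, shown with a generic abstract base class rather than ansible's ConnectionBase:

import abc

class Transport(abc.ABC):
    @abc.abstractmethod
    def connect(self): ...

    @abc.abstractmethod
    def close(self): ...

class NoOpTransport(Transport):
    """One shared concrete subclass: enough to satisfy the ABC so tests can instantiate it."""

    def connect(self):
        """Never called by the tests."""

    def close(self):
        """Never called by the tests."""

# every test reuses the same class instead of redefining a local one
assert isinstance(NoOpTransport(), Transport)
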
diff --git a/test/units/plugins/connection/test_local.py b/test/units/plugins/connection/test_local.py
index e552585..483a881 100644
--- a/test/units/plugins/connection/test_local.py
+++ b/test/units/plugins/connection/test_local.py
@@ -21,7 +21,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from io import StringIO
-import pytest
from units.compat import unittest
from ansible.plugins.connection import local
diff --git a/test/units/plugins/connection/test_paramiko.py b/test/units/plugins/connection/test_paramiko_ssh.py
index dcf3177..0307261 100644
--- a/test/units/plugins/connection/test_paramiko.py
+++ b/test/units/plugins/connection/test_paramiko_ssh.py
@@ -23,7 +23,8 @@ __metaclass__ = type
from io import StringIO
import pytest
-from ansible.plugins.connection import paramiko_ssh
+from ansible.plugins.connection import paramiko_ssh as paramiko_ssh_module
+from ansible.plugins.loader import connection_loader
from ansible.playbook.play_context import PlayContext
@@ -44,13 +45,14 @@ def in_stream():
def test_paramiko_connection_module(play_context, in_stream):
assert isinstance(
- paramiko_ssh.Connection(play_context, in_stream),
- paramiko_ssh.Connection)
+ connection_loader.get('paramiko_ssh', play_context, in_stream),
+ paramiko_ssh_module.Connection)
def test_paramiko_connect(play_context, in_stream, mocker):
- mocker.patch.object(paramiko_ssh.Connection, '_connect_uncached')
- connection = paramiko_ssh.Connection(play_context, in_stream)._connect()
+ paramiko_ssh = connection_loader.get('paramiko_ssh', play_context, in_stream)
+ mocker.patch.object(paramiko_ssh, '_connect_uncached')
+ connection = paramiko_ssh._connect()
- assert isinstance(connection, paramiko_ssh.Connection)
+ assert isinstance(connection, paramiko_ssh_module.Connection)
assert connection._connected is True
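For reference, these paramiko tests now fetch the plugin through connection_loader.get instead of constructing the class directly, so the instance comes back configured by the loader. A generic sketch of that lookup-then-type-check style, with a hypothetical registry in place of the real plugin loader:

class FakePluginRegistry:
    """Hypothetical registry standing in for a plugin loader."""

    def __init__(self, plugins):
        self._plugins = plugins

    def get(self, name, *args, **kwargs):
        return self._plugins[name](*args, **kwargs)

class ParamikoLikeConnection:
    def __init__(self, play_context, in_stream):
        self.play_context = play_context
        self.in_stream = in_stream

registry = FakePluginRegistry({'paramiko_ssh': ParamikoLikeConnection})
conn = registry.get('paramiko_ssh', object(), object())
assert isinstance(conn, ParamikoLikeConnection)
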
diff --git a/test/units/plugins/connection/test_ssh.py b/test/units/plugins/connection/test_ssh.py
index 662dff9..48ad3b7 100644
--- a/test/units/plugins/connection/test_ssh.py
+++ b/test/units/plugins/connection/test_ssh.py
@@ -24,14 +24,13 @@ from io import StringIO
import pytest
-from ansible import constants as C
from ansible.errors import AnsibleAuthenticationFailure
from units.compat import unittest
from unittest.mock import patch, MagicMock, PropertyMock
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.module_utils.compat.selectors import SelectorKey, EVENT_READ
from ansible.module_utils.six.moves import shlex_quote
-from ansible.module_utils._text import to_bytes
+from ansible.module_utils.common.text.converters import to_bytes
from ansible.playbook.play_context import PlayContext
from ansible.plugins.connection import ssh
from ansible.plugins.loader import connection_loader, become_loader
@@ -142,9 +141,8 @@ class TestConnectionBaseClass(unittest.TestCase):
conn.become.check_missing_password = MagicMock(side_effect=_check_missing_password)
def get_option(option):
- if option == 'become_pass':
- return 'password'
- return None
+ assert option == 'become_pass'
+ return 'password'
conn.become.get_option = get_option
output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nfoo\nline 3\nthis should be the remainder', False)
@@ -351,7 +349,7 @@ class MockSelector(object):
self.register = MagicMock(side_effect=self._register)
self.unregister = MagicMock(side_effect=self._unregister)
self.close = MagicMock()
- self.get_map = MagicMock(side_effect=self._get_map)
+ self.get_map = MagicMock()
self.select = MagicMock()
def _register(self, *args, **kwargs):
@@ -360,9 +358,6 @@ class MockSelector(object):
def _unregister(self, *args, **kwargs):
self.files_watched -= 1
- def _get_map(self, *args, **kwargs):
- return self.files_watched
-
@pytest.fixture
def mock_run_env(request, mocker):
@@ -457,7 +452,8 @@ class TestSSHConnectionRun(object):
def _password_with_prompt_examine_output(self, sourice, state, b_chunk, sudoable):
if state == 'awaiting_prompt':
self.conn._flags['become_prompt'] = True
- elif state == 'awaiting_escalation':
+ else:
+ assert state == 'awaiting_escalation'
self.conn._flags['become_success'] = True
return (b'', b'')
@@ -546,7 +542,6 @@ class TestSSHConnectionRetries(object):
def test_incorrect_password(self, monkeypatch):
self.conn.set_option('host_key_checking', False)
self.conn.set_option('reconnection_retries', 5)
- monkeypatch.setattr('time.sleep', lambda x: None)
self.mock_popen_res.stdout.read.side_effect = [b'']
self.mock_popen_res.stderr.read.side_effect = [b'Permission denied, please try again.\r\n']
@@ -669,7 +664,6 @@ class TestSSHConnectionRetries(object):
self.conn.set_option('reconnection_retries', 3)
monkeypatch.setattr('time.sleep', lambda x: None)
- monkeypatch.setattr('ansible.plugins.connection.ssh.os.path.exists', lambda x: True)
self.mock_popen_res.stdout.read.side_effect = [b"", b"my_stdout\n", b"second_line"]
self.mock_popen_res.stderr.read.side_effect = [b"", b"my_stderr"]
diff --git a/test/units/plugins/connection/test_winrm.py b/test/units/plugins/connection/test_winrm.py
index cb52814..c3060da 100644
--- a/test/units/plugins/connection/test_winrm.py
+++ b/test/units/plugins/connection/test_winrm.py
@@ -13,8 +13,8 @@ import pytest
from io import StringIO
from unittest.mock import MagicMock
-from ansible.errors import AnsibleConnectionFailure
-from ansible.module_utils._text import to_bytes
+from ansible.errors import AnsibleConnectionFailure, AnsibleError
+from ansible.module_utils.common.text.converters import to_bytes
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import connection_loader
from ansible.plugins.connection import winrm
@@ -441,3 +441,103 @@ class TestWinRMKerbAuth(object):
assert str(err.value) == \
"Kerberos auth failure for principal username with pexpect: " \
"Error with kinit\n<redacted>"
+
+ def test_exec_command_with_timeout(self, monkeypatch):
+ requests_exc = pytest.importorskip("requests.exceptions")
+
+ pc = PlayContext()
+ new_stdin = StringIO()
+ conn = connection_loader.get('winrm', pc, new_stdin)
+
+ mock_proto = MagicMock()
+ mock_proto.run_command.side_effect = requests_exc.Timeout("msg")
+
+ conn._connected = True
+ conn._winrm_host = 'hostname'
+
+ monkeypatch.setattr(conn, "_winrm_connect", lambda: mock_proto)
+
+ with pytest.raises(AnsibleConnectionFailure) as e:
+ conn.exec_command('cmd', in_data=None, sudoable=True)
+
+ assert str(e.value) == "winrm connection error: msg"
+
+ def test_exec_command_get_output_timeout(self, monkeypatch):
+ requests_exc = pytest.importorskip("requests.exceptions")
+
+ pc = PlayContext()
+ new_stdin = StringIO()
+ conn = connection_loader.get('winrm', pc, new_stdin)
+
+ mock_proto = MagicMock()
+ mock_proto.run_command.return_value = "command_id"
+ mock_proto.send_message.side_effect = requests_exc.Timeout("msg")
+
+ conn._connected = True
+ conn._winrm_host = 'hostname'
+
+ monkeypatch.setattr(conn, "_winrm_connect", lambda: mock_proto)
+
+ with pytest.raises(AnsibleConnectionFailure) as e:
+ conn.exec_command('cmd', in_data=None, sudoable=True)
+
+ assert str(e.value) == "winrm connection error: msg"
+
+ def test_connect_failure_auth_401(self, monkeypatch):
+ pc = PlayContext()
+ new_stdin = StringIO()
+ conn = connection_loader.get('winrm', pc, new_stdin)
+ conn.set_options(var_options={"ansible_winrm_transport": "basic", "_extras": {}})
+
+ mock_proto = MagicMock()
+ mock_proto.open_shell.side_effect = ValueError("Custom exc Code 401")
+
+ mock_proto_init = MagicMock()
+ mock_proto_init.return_value = mock_proto
+ monkeypatch.setattr(winrm, "Protocol", mock_proto_init)
+
+ with pytest.raises(AnsibleConnectionFailure, match="the specified credentials were rejected by the server"):
+ conn.exec_command('cmd', in_data=None, sudoable=True)
+
+ def test_connect_failure_other_exception(self, monkeypatch):
+ pc = PlayContext()
+ new_stdin = StringIO()
+ conn = connection_loader.get('winrm', pc, new_stdin)
+ conn.set_options(var_options={"ansible_winrm_transport": "basic", "_extras": {}})
+
+ mock_proto = MagicMock()
+ mock_proto.open_shell.side_effect = ValueError("Custom exc")
+
+ mock_proto_init = MagicMock()
+ mock_proto_init.return_value = mock_proto
+ monkeypatch.setattr(winrm, "Protocol", mock_proto_init)
+
+ with pytest.raises(AnsibleConnectionFailure, match="basic: Custom exc"):
+ conn.exec_command('cmd', in_data=None, sudoable=True)
+
+ def test_connect_failure_operation_timed_out(self, monkeypatch):
+ pc = PlayContext()
+ new_stdin = StringIO()
+ conn = connection_loader.get('winrm', pc, new_stdin)
+ conn.set_options(var_options={"ansible_winrm_transport": "basic", "_extras": {}})
+
+ mock_proto = MagicMock()
+ mock_proto.open_shell.side_effect = ValueError("Custom exc Operation timed out")
+
+ mock_proto_init = MagicMock()
+ mock_proto_init.return_value = mock_proto
+ monkeypatch.setattr(winrm, "Protocol", mock_proto_init)
+
+ with pytest.raises(AnsibleError, match="the connection attempt timed out"):
+ conn.exec_command('cmd', in_data=None, sudoable=True)
+
+ def test_connect_no_transport(self):
+ pc = PlayContext()
+ new_stdin = StringIO()
+ conn = connection_loader.get('winrm', pc, new_stdin)
+ conn.set_options(var_options={"_extras": {}})
+ conn._build_winrm_kwargs()
+ conn._winrm_transport = []
+
+ with pytest.raises(AnsibleError, match="No transport found for WinRM connection"):
+ conn._winrm_connect()
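For reference, the new winrm tests above combine three pytest idioms: importorskip for the optional requests dependency, monkeypatch to swap in a canned protocol object, and raises(match=...) to pin the error message. A compact standalone illustration of the monkeypatch/raises half (all names hypothetical):

import pytest

class Client:
    def __init__(self, proto_factory):
        self._proto_factory = proto_factory

    def run(self, cmd):
        try:
            return self._proto_factory().run_command(cmd)
        except ValueError as exc:
            raise RuntimeError('connection error: %s' % exc) from exc

def test_run_failure(monkeypatch):
    class BrokenProto:
        def run_command(self, cmd):
            raise ValueError('Custom exc')

    client = Client(proto_factory=lambda: None)
    # comparable to monkeypatch.setattr(conn, "_winrm_connect", lambda: mock_proto)
    monkeypatch.setattr(client, '_proto_factory', BrokenProto)

    with pytest.raises(RuntimeError, match='Custom exc'):
        client.run('cmd')
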
diff --git a/test/units/plugins/filter/test_core.py b/test/units/plugins/filter/test_core.py
index df4e472..ab09ec4 100644
--- a/test/units/plugins/filter/test_core.py
+++ b/test/units/plugins/filter/test_core.py
@@ -3,13 +3,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
-from jinja2.runtime import Undefined
-from jinja2.exceptions import UndefinedError
__metaclass__ = type
import pytest
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.plugins.filter.core import to_uuid
from ansible.errors import AnsibleFilterError
diff --git a/test/units/plugins/filter/test_mathstuff.py b/test/units/plugins/filter/test_mathstuff.py
index f793871..4ac5487 100644
--- a/test/units/plugins/filter/test_mathstuff.py
+++ b/test/units/plugins/filter/test_mathstuff.py
@@ -1,9 +1,8 @@
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+
import pytest
from jinja2 import Environment
@@ -12,54 +11,68 @@ import ansible.plugins.filter.mathstuff as ms
from ansible.errors import AnsibleFilterError, AnsibleFilterTypeError
-UNIQUE_DATA = (([1, 3, 4, 2], [1, 3, 4, 2]),
- ([1, 3, 2, 4, 2, 3], [1, 3, 2, 4]),
- (['a', 'b', 'c', 'd'], ['a', 'b', 'c', 'd']),
- (['a', 'a', 'd', 'b', 'a', 'd', 'c', 'b'], ['a', 'd', 'b', 'c']),
- )
+UNIQUE_DATA = [
+ ([], []),
+ ([1, 3, 4, 2], [1, 3, 4, 2]),
+ ([1, 3, 2, 4, 2, 3], [1, 3, 2, 4]),
+ ([1, 2, 3, 4], [1, 2, 3, 4]),
+ ([1, 1, 4, 2, 1, 4, 3, 2], [1, 4, 2, 3]),
+]
+
+TWO_SETS_DATA = [
+ ([], [], ([], [], [])),
+ ([1, 2], [1, 2], ([1, 2], [], [])),
+ ([1, 2], [3, 4], ([], [1, 2], [1, 2, 3, 4])),
+ ([1, 2, 3], [5, 3, 4], ([3], [1, 2], [1, 2, 5, 4])),
+ ([1, 2, 3], [4, 3, 5], ([3], [1, 2], [1, 2, 4, 5])),
+]
+
+
+def dict_values(values: list[int]) -> list[dict[str, int]]:
+ """Return a list of non-hashable values derived from the given list."""
+ return [dict(x=value) for value in values]
+
+
+for _data, _expected in list(UNIQUE_DATA):
+ UNIQUE_DATA.append((dict_values(_data), dict_values(_expected)))
+
+for _dataset1, _dataset2, _expected in list(TWO_SETS_DATA):
+ TWO_SETS_DATA.append((dict_values(_dataset1), dict_values(_dataset2), tuple(dict_values(answer) for answer in _expected)))
-TWO_SETS_DATA = (([1, 2], [3, 4], ([], sorted([1, 2]), sorted([1, 2, 3, 4]), sorted([1, 2, 3, 4]))),
- ([1, 2, 3], [5, 3, 4], ([3], sorted([1, 2]), sorted([1, 2, 5, 4]), sorted([1, 2, 3, 4, 5]))),
- (['a', 'b', 'c'], ['d', 'c', 'e'], (['c'], sorted(['a', 'b']), sorted(['a', 'b', 'd', 'e']), sorted(['a', 'b', 'c', 'e', 'd']))),
- )
env = Environment()
-@pytest.mark.parametrize('data, expected', UNIQUE_DATA)
-class TestUnique:
- def test_unhashable(self, data, expected):
- assert ms.unique(env, list(data)) == expected
+def assert_lists_contain_same_elements(a, b) -> None:
+ """Assert that the two values given are lists that contain the same elements, even when the elements cannot be sorted or hashed."""
+ assert isinstance(a, list)
+ assert isinstance(b, list)
- def test_hashable(self, data, expected):
- assert ms.unique(env, tuple(data)) == expected
+ missing_from_a = [item for item in b if item not in a]
+ missing_from_b = [item for item in a if item not in b]
+ assert not missing_from_a, f'elements from `b` {missing_from_a} missing from `a` {a}'
+ assert not missing_from_b, f'elements from `a` {missing_from_b} missing from `b` {b}'
-@pytest.mark.parametrize('dataset1, dataset2, expected', TWO_SETS_DATA)
-class TestIntersect:
- def test_unhashable(self, dataset1, dataset2, expected):
- assert sorted(ms.intersect(env, list(dataset1), list(dataset2))) == expected[0]
- def test_hashable(self, dataset1, dataset2, expected):
- assert sorted(ms.intersect(env, tuple(dataset1), tuple(dataset2))) == expected[0]
+@pytest.mark.parametrize('data, expected', UNIQUE_DATA, ids=str)
+def test_unique(data, expected):
+ assert_lists_contain_same_elements(ms.unique(env, data), expected)
-@pytest.mark.parametrize('dataset1, dataset2, expected', TWO_SETS_DATA)
-class TestDifference:
- def test_unhashable(self, dataset1, dataset2, expected):
- assert sorted(ms.difference(env, list(dataset1), list(dataset2))) == expected[1]
+@pytest.mark.parametrize('dataset1, dataset2, expected', TWO_SETS_DATA, ids=str)
+def test_intersect(dataset1, dataset2, expected):
+ assert_lists_contain_same_elements(ms.intersect(env, dataset1, dataset2), expected[0])
- def test_hashable(self, dataset1, dataset2, expected):
- assert sorted(ms.difference(env, tuple(dataset1), tuple(dataset2))) == expected[1]
+@pytest.mark.parametrize('dataset1, dataset2, expected', TWO_SETS_DATA, ids=str)
+def test_difference(dataset1, dataset2, expected):
+ assert_lists_contain_same_elements(ms.difference(env, dataset1, dataset2), expected[1])
-@pytest.mark.parametrize('dataset1, dataset2, expected', TWO_SETS_DATA)
-class TestSymmetricDifference:
- def test_unhashable(self, dataset1, dataset2, expected):
- assert sorted(ms.symmetric_difference(env, list(dataset1), list(dataset2))) == expected[2]
- def test_hashable(self, dataset1, dataset2, expected):
- assert sorted(ms.symmetric_difference(env, tuple(dataset1), tuple(dataset2))) == expected[2]
+@pytest.mark.parametrize('dataset1, dataset2, expected', TWO_SETS_DATA, ids=str)
+def test_symmetric_difference(dataset1, dataset2, expected):
+ assert_lists_contain_same_elements(ms.symmetric_difference(env, dataset1, dataset2), expected[2])
class TestLogarithm:
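For reference, the assert_lists_contain_same_elements helper introduced above exists because the parametrized data now includes dicts, which are unhashable and unorderable, so neither set() nor sorted() can be used to compare filter output. A tiny standalone demonstration of the containment-based check:

def same_elements(a, b):
    """Order-insensitive comparison that also works for unhashable items such as dicts."""
    missing_from_a = [item for item in b if item not in a]
    missing_from_b = [item for item in a if item not in b]
    return not missing_from_a and not missing_from_b

data = [{'x': 1}, {'x': 2}]
assert same_elements(data, [{'x': 2}, {'x': 1}])
# set(data) raises TypeError (unhashable) and sorted(data) raises TypeError (unorderable)
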
diff --git a/test/units/plugins/inventory/test_constructed.py b/test/units/plugins/inventory/test_constructed.py
index 581e025..8ae78f1 100644
--- a/test/units/plugins/inventory/test_constructed.py
+++ b/test/units/plugins/inventory/test_constructed.py
@@ -194,11 +194,11 @@ def test_parent_group_templating_error(inventory_module):
'parent_group': '{{ location.barn-yard }}'
}
]
- with pytest.raises(AnsibleParserError) as err_message:
+ with pytest.raises(AnsibleParserError) as ex:
inventory_module._add_host_to_keyed_groups(
keyed_groups, host.vars, host.name, strict=True
)
- assert 'Could not generate parent group' in err_message
+ assert 'Could not generate parent group' in str(ex.value)
# invalid parent group did not raise an exception with strict=False
inventory_module._add_host_to_keyed_groups(
keyed_groups, host.vars, host.name, strict=False
@@ -213,17 +213,17 @@ def test_keyed_group_exclusive_argument(inventory_module):
host = inventory_module.inventory.get_host('cow')
keyed_groups = [
{
- 'key': 'tag',
+ 'key': 'nickname',
'separator': '_',
'default_value': 'default_value_name',
'trailing_separator': True
}
]
- with pytest.raises(AnsibleParserError) as err_message:
+ with pytest.raises(AnsibleParserError) as ex:
inventory_module._add_host_to_keyed_groups(
keyed_groups, host.vars, host.name, strict=True
)
- assert 'parameters are mutually exclusive' in err_message
+ assert 'parameters are mutually exclusive' in str(ex.value)
def test_keyed_group_empty_value(inventory_module):
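For reference, the two assertion fixes above matter because pytest.raises yields an ExceptionInfo wrapper rather than the exception itself; checking the message has to go through str(ex.value). A standalone demonstration:

import pytest

def add_to_group():
    raise ValueError('Could not generate parent group')

def test_exception_message():
    with pytest.raises(ValueError) as ex:
        add_to_group()
    # ex is an ExceptionInfo wrapper; the raised exception lives in ex.value
    assert 'Could not generate parent group' in str(ex.value)
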
diff --git a/test/units/plugins/inventory/test_inventory.py b/test/units/plugins/inventory/test_inventory.py
index df24607..fb5342a 100644
--- a/test/units/plugins/inventory/test_inventory.py
+++ b/test/units/plugins/inventory/test_inventory.py
@@ -27,7 +27,7 @@ from unittest import mock
from ansible import constants as C
from units.compat import unittest
from ansible.module_utils.six import string_types
-from ansible.module_utils._text import to_text
+from ansible.module_utils.common.text.converters import to_text
from units.mock.path import mock_unfrackpath_noop
from ansible.inventory.manager import InventoryManager, split_host_pattern
diff --git a/test/units/plugins/inventory/test_script.py b/test/units/plugins/inventory/test_script.py
index 9f75199..89eb4f5 100644
--- a/test/units/plugins/inventory/test_script.py
+++ b/test/units/plugins/inventory/test_script.py
@@ -28,7 +28,7 @@ from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.plugins.loader import PluginLoader
from units.compat import unittest
-from ansible.module_utils._text import to_bytes, to_native
+from ansible.module_utils.common.text.converters import to_bytes, to_native
class TestInventoryModule(unittest.TestCase):
@@ -103,3 +103,11 @@ class TestInventoryModule(unittest.TestCase):
self.inventory_module.parse(self.inventory, self.loader, '/foo/bar/foobar.py')
assert e.value.message == to_native("failed to parse executable inventory script results from "
"/foo/bar/foobar.py: needs to be a json dict\ndummyédata\n")
+
+ def test_get_host_variables_subprocess_script_raises_error(self):
+ self.popen_result.returncode = 1
+ self.popen_result.stderr = to_bytes("dummyéerror")
+
+ with pytest.raises(AnsibleError) as e:
+ self.inventory_module.get_host_variables('/foo/bar/foobar.py', 'dummy host')
+ assert e.value.message == "Inventory script (/foo/bar/foobar.py) had an execution error: dummyéerror"
diff --git a/test/units/plugins/lookup/test_password.py b/test/units/plugins/lookup/test_password.py
index 318bc10..685f2ce 100644
--- a/test/units/plugins/lookup/test_password.py
+++ b/test/units/plugins/lookup/test_password.py
@@ -23,7 +23,7 @@ __metaclass__ = type
try:
import passlib
from passlib.handlers import pbkdf2
-except ImportError:
+except ImportError: # pragma: nocover
passlib = None
pbkdf2 = None
@@ -36,7 +36,7 @@ from unittest.mock import mock_open, patch
from ansible.errors import AnsibleError
from ansible.module_utils.six import text_type
from ansible.module_utils.six.moves import builtins
-from ansible.module_utils._text import to_bytes
+from ansible.module_utils.common.text.converters import to_bytes
from ansible.plugins.loader import PluginLoader, lookup_loader
from ansible.plugins.lookup import password
@@ -416,8 +416,6 @@ class BaseTestLookupModule(unittest.TestCase):
password.os.open = lambda path, flag: None
self.os_close = password.os.close
password.os.close = lambda fd: None
- self.os_remove = password.os.remove
- password.os.remove = lambda path: None
self.makedirs_safe = password.makedirs_safe
password.makedirs_safe = lambda path, mode: None
@@ -425,7 +423,6 @@ class BaseTestLookupModule(unittest.TestCase):
password.os.path.exists = self.os_path_exists
password.os.open = self.os_open
password.os.close = self.os_close
- password.os.remove = self.os_remove
password.makedirs_safe = self.makedirs_safe
@@ -467,23 +464,17 @@ class TestLookupModuleWithoutPasslib(BaseTestLookupModule):
def test_lock_been_held(self, mock_sleep):
# pretend the lock file is here
password.os.path.exists = lambda x: True
- try:
+ with pytest.raises(AnsibleError):
with patch.object(builtins, 'open', mock_open(read_data=b'hunter42 salt=87654321\n')) as m:
# should timeout here
- results = self.password_lookup.run([u'/path/to/somewhere chars=anything'], None)
- self.fail("Lookup didn't timeout when lock already been held")
- except AnsibleError:
- pass
+ self.password_lookup.run([u'/path/to/somewhere chars=anything'], None)
def test_lock_not_been_held(self):
# pretend now there is password file but no lock
password.os.path.exists = lambda x: x == to_bytes('/path/to/somewhere')
- try:
- with patch.object(builtins, 'open', mock_open(read_data=b'hunter42 salt=87654321\n')) as m:
- # should not timeout here
- results = self.password_lookup.run([u'/path/to/somewhere chars=anything'], None)
- except AnsibleError:
- self.fail('Lookup timeouts when lock is free')
+ with patch.object(builtins, 'open', mock_open(read_data=b'hunter42 salt=87654321\n')) as m:
+ # should not timeout here
+ results = self.password_lookup.run([u'/path/to/somewhere chars=anything'], None)
for result in results:
self.assertEqual(result, u'hunter42')
@@ -531,10 +522,8 @@ class TestLookupModuleWithPasslib(BaseTestLookupModule):
self.assertEqual(int(str_parts[2]), crypt_parts['rounds'])
self.assertIsInstance(result, text_type)
- @patch.object(PluginLoader, '_get_paths')
@patch('ansible.plugins.lookup.password._write_password_file')
- def test_password_already_created_encrypt(self, mock_get_paths, mock_write_file):
- mock_get_paths.return_value = ['/path/one', '/path/two', '/path/three']
+ def test_password_already_created_encrypt(self, mock_write_file):
password.os.path.exists = lambda x: x == to_bytes('/path/to/somewhere')
with patch.object(builtins, 'open', mock_open(read_data=b'hunter42 salt=87654321\n')) as m:
@@ -542,6 +531,9 @@ class TestLookupModuleWithPasslib(BaseTestLookupModule):
for result in results:
self.assertEqual(result, u'$pbkdf2-sha256$20000$ODc2NTQzMjE$Uikde0cv0BKaRaAXMrUQB.zvG4GmnjClwjghwIRf2gU')
+ # Assert the password file is not rewritten
+ mock_write_file.assert_not_called()
+
@pytest.mark.skipif(passlib is None, reason='passlib must be installed to run these tests')
class TestLookupModuleWithPasslibWrappedAlgo(BaseTestLookupModule):
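For reference, the mock_write_file.assert_not_called() check added above is the standard unittest.mock way of proving a code path was skipped, here that an existing password file is not rewritten. A minimal illustration with hypothetical names:

from unittest.mock import MagicMock

def maybe_write(path, content, exists, writer):
    # only write when no password file exists yet
    if not exists:
        writer(path, content)

writer = MagicMock()
maybe_write('/tmp/pw', 'hunter42', exists=True, writer=writer)
writer.assert_not_called()

maybe_write('/tmp/pw', 'hunter42', exists=False, writer=writer)
writer.assert_called_once_with('/tmp/pw', 'hunter42')
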
diff --git a/test/units/plugins/strategy/test_strategy.py b/test/units/plugins/strategy/test_strategy.py
deleted file mode 100644
index f935f4b..0000000
--- a/test/units/plugins/strategy/test_strategy.py
+++ /dev/null
@@ -1,492 +0,0 @@
-# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from units.mock.loader import DictDataLoader
-import uuid
-
-from units.compat import unittest
-from unittest.mock import patch, MagicMock
-from ansible.executor.process.worker import WorkerProcess
-from ansible.executor.task_queue_manager import TaskQueueManager
-from ansible.executor.task_result import TaskResult
-from ansible.inventory.host import Host
-from ansible.module_utils.six.moves import queue as Queue
-from ansible.playbook.block import Block
-from ansible.playbook.handler import Handler
-from ansible.plugins.strategy import StrategyBase
-
-import pytest
-
-pytestmark = pytest.mark.skipif(True, reason="Temporarily disabled due to fragile tests that need rewritten")
-
-
-class TestStrategyBase(unittest.TestCase):
-
- def test_strategy_base_init(self):
- queue_items = []
-
- def _queue_empty(*args, **kwargs):
- return len(queue_items) == 0
-
- def _queue_get(*args, **kwargs):
- if len(queue_items) == 0:
- raise Queue.Empty
- else:
- return queue_items.pop()
-
- def _queue_put(item, *args, **kwargs):
- queue_items.append(item)
-
- mock_queue = MagicMock()
- mock_queue.empty.side_effect = _queue_empty
- mock_queue.get.side_effect = _queue_get
- mock_queue.put.side_effect = _queue_put
-
- mock_tqm = MagicMock(TaskQueueManager)
- mock_tqm._final_q = mock_queue
- mock_tqm._workers = []
- strategy_base = StrategyBase(tqm=mock_tqm)
- strategy_base.cleanup()
-
- def test_strategy_base_run(self):
- queue_items = []
-
- def _queue_empty(*args, **kwargs):
- return len(queue_items) == 0
-
- def _queue_get(*args, **kwargs):
- if len(queue_items) == 0:
- raise Queue.Empty
- else:
- return queue_items.pop()
-
- def _queue_put(item, *args, **kwargs):
- queue_items.append(item)
-
- mock_queue = MagicMock()
- mock_queue.empty.side_effect = _queue_empty
- mock_queue.get.side_effect = _queue_get
- mock_queue.put.side_effect = _queue_put
-
- mock_tqm = MagicMock(TaskQueueManager)
- mock_tqm._final_q = mock_queue
- mock_tqm._stats = MagicMock()
- mock_tqm.send_callback.return_value = None
-
- for attr in ('RUN_OK', 'RUN_ERROR', 'RUN_FAILED_HOSTS', 'RUN_UNREACHABLE_HOSTS'):
- setattr(mock_tqm, attr, getattr(TaskQueueManager, attr))
-
- mock_iterator = MagicMock()
- mock_iterator._play = MagicMock()
- mock_iterator._play.handlers = []
-
- mock_play_context = MagicMock()
-
- mock_tqm._failed_hosts = dict()
- mock_tqm._unreachable_hosts = dict()
- mock_tqm._workers = []
- strategy_base = StrategyBase(tqm=mock_tqm)
-
- mock_host = MagicMock()
- mock_host.name = 'host1'
-
- self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context), mock_tqm.RUN_OK)
- self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=TaskQueueManager.RUN_ERROR), mock_tqm.RUN_ERROR)
- mock_tqm._failed_hosts = dict(host1=True)
- mock_iterator.get_failed_hosts.return_value = [mock_host]
- self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=False), mock_tqm.RUN_FAILED_HOSTS)
- mock_tqm._unreachable_hosts = dict(host1=True)
- mock_iterator.get_failed_hosts.return_value = []
- self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=False), mock_tqm.RUN_UNREACHABLE_HOSTS)
- strategy_base.cleanup()
-
- def test_strategy_base_get_hosts(self):
- queue_items = []
-
- def _queue_empty(*args, **kwargs):
- return len(queue_items) == 0
-
- def _queue_get(*args, **kwargs):
- if len(queue_items) == 0:
- raise Queue.Empty
- else:
- return queue_items.pop()
-
- def _queue_put(item, *args, **kwargs):
- queue_items.append(item)
-
- mock_queue = MagicMock()
- mock_queue.empty.side_effect = _queue_empty
- mock_queue.get.side_effect = _queue_get
- mock_queue.put.side_effect = _queue_put
-
- mock_hosts = []
- for i in range(0, 5):
- mock_host = MagicMock()
- mock_host.name = "host%02d" % (i + 1)
- mock_host.has_hostkey = True
- mock_hosts.append(mock_host)
-
- mock_hosts_names = [h.name for h in mock_hosts]
-
- mock_inventory = MagicMock()
- mock_inventory.get_hosts.return_value = mock_hosts
-
- mock_tqm = MagicMock()
- mock_tqm._final_q = mock_queue
- mock_tqm.get_inventory.return_value = mock_inventory
-
- mock_play = MagicMock()
- mock_play.hosts = ["host%02d" % (i + 1) for i in range(0, 5)]
-
- strategy_base = StrategyBase(tqm=mock_tqm)
- strategy_base._hosts_cache = strategy_base._hosts_cache_all = mock_hosts_names
-
- mock_tqm._failed_hosts = []
- mock_tqm._unreachable_hosts = []
- self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), [h.name for h in mock_hosts])
-
- mock_tqm._failed_hosts = ["host01"]
- self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), [h.name for h in mock_hosts[1:]])
- self.assertEqual(strategy_base.get_failed_hosts(play=mock_play), [mock_hosts[0].name])
-
- mock_tqm._unreachable_hosts = ["host02"]
- self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), [h.name for h in mock_hosts[2:]])
- strategy_base.cleanup()
-
- @patch.object(WorkerProcess, 'run')
- def test_strategy_base_queue_task(self, mock_worker):
- def fake_run(self):
- return
-
- mock_worker.run.side_effect = fake_run
-
- fake_loader = DictDataLoader()
- mock_var_manager = MagicMock()
- mock_host = MagicMock()
- mock_host.get_vars.return_value = dict()
- mock_host.has_hostkey = True
- mock_inventory = MagicMock()
- mock_inventory.get.return_value = mock_host
-
- tqm = TaskQueueManager(
- inventory=mock_inventory,
- variable_manager=mock_var_manager,
- loader=fake_loader,
- passwords=None,
- forks=3,
- )
- tqm._initialize_processes(3)
- tqm.hostvars = dict()
-
- mock_task = MagicMock()
- mock_task._uuid = 'abcd'
- mock_task.throttle = 0
-
- try:
- strategy_base = StrategyBase(tqm=tqm)
- strategy_base._queue_task(host=mock_host, task=mock_task, task_vars=dict(), play_context=MagicMock())
- self.assertEqual(strategy_base._cur_worker, 1)
- self.assertEqual(strategy_base._pending_results, 1)
- strategy_base._queue_task(host=mock_host, task=mock_task, task_vars=dict(), play_context=MagicMock())
- self.assertEqual(strategy_base._cur_worker, 2)
- self.assertEqual(strategy_base._pending_results, 2)
- strategy_base._queue_task(host=mock_host, task=mock_task, task_vars=dict(), play_context=MagicMock())
- self.assertEqual(strategy_base._cur_worker, 0)
- self.assertEqual(strategy_base._pending_results, 3)
- finally:
- tqm.cleanup()
-
- def test_strategy_base_process_pending_results(self):
- mock_tqm = MagicMock()
- mock_tqm._terminated = False
- mock_tqm._failed_hosts = dict()
- mock_tqm._unreachable_hosts = dict()
- mock_tqm.send_callback.return_value = None
-
- queue_items = []
-
- def _queue_empty(*args, **kwargs):
- return len(queue_items) == 0
-
- def _queue_get(*args, **kwargs):
- if len(queue_items) == 0:
- raise Queue.Empty
- else:
- return queue_items.pop()
-
- def _queue_put(item, *args, **kwargs):
- queue_items.append(item)
-
- mock_queue = MagicMock()
- mock_queue.empty.side_effect = _queue_empty
- mock_queue.get.side_effect = _queue_get
- mock_queue.put.side_effect = _queue_put
- mock_tqm._final_q = mock_queue
-
- mock_tqm._stats = MagicMock()
- mock_tqm._stats.increment.return_value = None
-
- mock_play = MagicMock()
-
- mock_host = MagicMock()
- mock_host.name = 'test01'
- mock_host.vars = dict()
- mock_host.get_vars.return_value = dict()
- mock_host.has_hostkey = True
-
- mock_task = MagicMock()
- mock_task._role = None
- mock_task._parent = None
- mock_task.ignore_errors = False
- mock_task.ignore_unreachable = False
- mock_task._uuid = str(uuid.uuid4())
- mock_task.loop = None
- mock_task.copy.return_value = mock_task
-
- mock_handler_task = Handler()
- mock_handler_task.name = 'test handler'
- mock_handler_task.action = 'foo'
- mock_handler_task._parent = None
- mock_handler_task._uuid = 'xxxxxxxxxxxxx'
-
- mock_iterator = MagicMock()
- mock_iterator._play = mock_play
- mock_iterator.mark_host_failed.return_value = None
- mock_iterator.get_next_task_for_host.return_value = (None, None)
-
- mock_handler_block = MagicMock()
- mock_handler_block.name = '' # implicit unnamed block
- mock_handler_block.block = [mock_handler_task]
- mock_handler_block.rescue = []
- mock_handler_block.always = []
- mock_play.handlers = [mock_handler_block]
-
- mock_group = MagicMock()
- mock_group.add_host.return_value = None
-
- def _get_host(host_name):
- if host_name == 'test01':
- return mock_host
- return None
-
- def _get_group(group_name):
- if group_name in ('all', 'foo'):
- return mock_group
- return None
-
- mock_inventory = MagicMock()
- mock_inventory._hosts_cache = dict()
- mock_inventory.hosts.return_value = mock_host
- mock_inventory.get_host.side_effect = _get_host
- mock_inventory.get_group.side_effect = _get_group
- mock_inventory.clear_pattern_cache.return_value = None
- mock_inventory.get_host_vars.return_value = {}
- mock_inventory.hosts.get.return_value = mock_host
-
- mock_var_mgr = MagicMock()
- mock_var_mgr.set_host_variable.return_value = None
- mock_var_mgr.set_host_facts.return_value = None
- mock_var_mgr.get_vars.return_value = dict()
-
- strategy_base = StrategyBase(tqm=mock_tqm)
- strategy_base._inventory = mock_inventory
- strategy_base._variable_manager = mock_var_mgr
- strategy_base._blocked_hosts = dict()
-
- def _has_dead_workers():
- return False
-
- strategy_base._tqm.has_dead_workers.side_effect = _has_dead_workers
- results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
- self.assertEqual(len(results), 0)
-
- task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(changed=True))
- queue_items.append(task_result)
- strategy_base._blocked_hosts['test01'] = True
- strategy_base._pending_results = 1
-
- def mock_queued_task_cache():
- return {
- (mock_host.name, mock_task._uuid): {
- 'task': mock_task,
- 'host': mock_host,
- 'task_vars': {},
- 'play_context': {},
- }
- }
-
- strategy_base._queued_task_cache = mock_queued_task_cache()
- results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0], task_result)
- self.assertEqual(strategy_base._pending_results, 0)
- self.assertNotIn('test01', strategy_base._blocked_hosts)
-
- task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data='{"failed":true}')
- queue_items.append(task_result)
- strategy_base._blocked_hosts['test01'] = True
- strategy_base._pending_results = 1
- mock_iterator.is_failed.return_value = True
- strategy_base._queued_task_cache = mock_queued_task_cache()
- results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0], task_result)
- self.assertEqual(strategy_base._pending_results, 0)
- self.assertNotIn('test01', strategy_base._blocked_hosts)
- # self.assertIn('test01', mock_tqm._failed_hosts)
- # del mock_tqm._failed_hosts['test01']
- mock_iterator.is_failed.return_value = False
-
- task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data='{"unreachable": true}')
- queue_items.append(task_result)
- strategy_base._blocked_hosts['test01'] = True
- strategy_base._pending_results = 1
- strategy_base._queued_task_cache = mock_queued_task_cache()
- results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0], task_result)
- self.assertEqual(strategy_base._pending_results, 0)
- self.assertNotIn('test01', strategy_base._blocked_hosts)
- self.assertIn('test01', mock_tqm._unreachable_hosts)
- del mock_tqm._unreachable_hosts['test01']
-
- task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data='{"skipped": true}')
- queue_items.append(task_result)
- strategy_base._blocked_hosts['test01'] = True
- strategy_base._pending_results = 1
- strategy_base._queued_task_cache = mock_queued_task_cache()
- results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0], task_result)
- self.assertEqual(strategy_base._pending_results, 0)
- self.assertNotIn('test01', strategy_base._blocked_hosts)
-
- queue_items.append(TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(add_host=dict(host_name='newhost01', new_groups=['foo']))))
- strategy_base._blocked_hosts['test01'] = True
- strategy_base._pending_results = 1
- strategy_base._queued_task_cache = mock_queued_task_cache()
- results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
- self.assertEqual(len(results), 1)
- self.assertEqual(strategy_base._pending_results, 0)
- self.assertNotIn('test01', strategy_base._blocked_hosts)
-
- queue_items.append(TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(add_group=dict(group_name='foo'))))
- strategy_base._blocked_hosts['test01'] = True
- strategy_base._pending_results = 1
- strategy_base._queued_task_cache = mock_queued_task_cache()
- results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
- self.assertEqual(len(results), 1)
- self.assertEqual(strategy_base._pending_results, 0)
- self.assertNotIn('test01', strategy_base._blocked_hosts)
-
- queue_items.append(TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(changed=True, _ansible_notify=['test handler'])))
- strategy_base._blocked_hosts['test01'] = True
- strategy_base._pending_results = 1
- strategy_base._queued_task_cache = mock_queued_task_cache()
- results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
- self.assertEqual(len(results), 1)
- self.assertEqual(strategy_base._pending_results, 0)
- self.assertNotIn('test01', strategy_base._blocked_hosts)
- self.assertEqual(mock_iterator._play.handlers[0].block[0], mock_handler_task)
-
- # queue_items.append(('set_host_var', mock_host, mock_task, None, 'foo', 'bar'))
- # results = strategy_base._process_pending_results(iterator=mock_iterator)
- # self.assertEqual(len(results), 0)
- # self.assertEqual(strategy_base._pending_results, 1)
-
- # queue_items.append(('set_host_facts', mock_host, mock_task, None, 'foo', dict()))
- # results = strategy_base._process_pending_results(iterator=mock_iterator)
- # self.assertEqual(len(results), 0)
- # self.assertEqual(strategy_base._pending_results, 1)
-
- # queue_items.append(('bad'))
- # self.assertRaises(AnsibleError, strategy_base._process_pending_results, iterator=mock_iterator)
- strategy_base.cleanup()
-
- def test_strategy_base_load_included_file(self):
- fake_loader = DictDataLoader({
- "test.yml": """
- - debug: msg='foo'
- """,
- "bad.yml": """
- """,
- })
-
- queue_items = []
-
- def _queue_empty(*args, **kwargs):
- return len(queue_items) == 0
-
- def _queue_get(*args, **kwargs):
- if len(queue_items) == 0:
- raise Queue.Empty
- else:
- return queue_items.pop()
-
- def _queue_put(item, *args, **kwargs):
- queue_items.append(item)
-
- mock_queue = MagicMock()
- mock_queue.empty.side_effect = _queue_empty
- mock_queue.get.side_effect = _queue_get
- mock_queue.put.side_effect = _queue_put
-
- mock_tqm = MagicMock()
- mock_tqm._final_q = mock_queue
-
- strategy_base = StrategyBase(tqm=mock_tqm)
- strategy_base._loader = fake_loader
- strategy_base.cleanup()
-
- mock_play = MagicMock()
-
- mock_block = MagicMock()
- mock_block._play = mock_play
- mock_block.vars = dict()
-
- mock_task = MagicMock()
- mock_task._block = mock_block
- mock_task._role = None
-
- # NOTE Mocking calls below to account for passing parent_block=ti_copy.build_parent_block()
- # into load_list_of_blocks() in _load_included_file. Not doing so meant that retrieving
- # `collection` attr from parent would result in getting MagicMock instance
- # instead of an empty list.
- mock_task._parent = MagicMock()
- mock_task.copy.return_value = mock_task
- mock_task.build_parent_block.return_value = mock_block
- mock_block._get_parent_attribute.return_value = None
-
- mock_iterator = MagicMock()
- mock_iterator.mark_host_failed.return_value = None
-
- mock_inc_file = MagicMock()
- mock_inc_file._task = mock_task
-
- mock_inc_file._filename = "test.yml"
- res = strategy_base._load_included_file(included_file=mock_inc_file, iterator=mock_iterator)
- self.assertEqual(len(res), 1)
- self.assertTrue(isinstance(res[0], Block))
-
- mock_inc_file._filename = "bad.yml"
- res = strategy_base._load_included_file(included_file=mock_inc_file, iterator=mock_iterator)
- self.assertEqual(res, [])
diff --git a/test/units/plugins/test_plugins.py b/test/units/plugins/test_plugins.py
index be123b1..ba2ad2b 100644
--- a/test/units/plugins/test_plugins.py
+++ b/test/units/plugins/test_plugins.py
@@ -46,14 +46,14 @@ class TestErrors(unittest.TestCase):
# python library, and then uses the __file__ attribute of
# the result for that to get the library path, so we mock
# that here and patch the builtin to use our mocked result
- foo = MagicMock()
- bar = MagicMock()
+ foo_pkg = MagicMock()
+ bar_pkg = MagicMock()
bam = MagicMock()
bam.__file__ = '/path/to/my/foo/bar/bam/__init__.py'
- bar.bam = bam
- foo.return_value.bar = bar
+ bar_pkg.bam = bam
+ foo_pkg.return_value.bar = bar_pkg
pl = PluginLoader('test', 'foo.bar.bam', 'test', 'test_plugin')
- with patch('builtins.__import__', foo):
+ with patch('builtins.__import__', foo_pkg):
self.assertEqual(pl._get_package_paths(), ['/path/to/my/foo/bar/bam'])
def test_plugins__get_paths(self):
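The hunk above only renames the mocks (foo/bar to foo_pkg/bar_pkg) so they read as packages, but the technique it exercises is worth spelling out: patch builtins.__import__ with a MagicMock whose attribute chain carries a fake __file__, and code that imports a dotted package and inspects __file__ sees a controlled path. A minimal, self-contained sketch of that pattern; load_pkg_dir is a hypothetical stand-in for PluginLoader._get_package_paths, not Ansible API:

import os
from unittest.mock import MagicMock, patch

def load_pkg_dir(dotted_name):
    # Hypothetical stand-in for the code under test: import a dotted package
    # and return the directory holding its __init__.py.
    mod = __import__(dotted_name)
    for part in dotted_name.split('.')[1:]:
        mod = getattr(mod, part)
    return os.path.dirname(mod.__file__)

bam_pkg = MagicMock()
bam_pkg.__file__ = '/path/to/my/foo/bar/bam/__init__.py'
root_pkg = MagicMock()
root_pkg.bar.bam = bam_pkg  # attribute chain walked after the fake import

with patch('builtins.__import__', return_value=root_pkg):
    assert load_pkg_dir('foo.bar.bam') == '/path/to/my/foo/bar/bam'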
diff --git a/test/units/requirements.txt b/test/units/requirements.txt
index 1822ada..c77c55c 100644
--- a/test/units/requirements.txt
+++ b/test/units/requirements.txt
@@ -1,4 +1,4 @@
-bcrypt ; python_version >= '3.9' # controller only
-passlib ; python_version >= '3.9' # controller only
-pexpect ; python_version >= '3.9' # controller only
-pywinrm ; python_version >= '3.9' # controller only
+bcrypt ; python_version >= '3.10' # controller only
+passlib ; python_version >= '3.10' # controller only
+pexpect ; python_version >= '3.10' # controller only
+pywinrm ; python_version >= '3.10' # controller only
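The requirements bump only moves the environment marker from 3.9 to 3.10. Markers like these are evaluated by pip against the running interpreter; the same expression can be checked directly with the packaging library (assumed installed separately, though pip vendors it):

from packaging.markers import Marker

marker = Marker("python_version >= '3.10'")
# True only on interpreters where these controller-only dependencies apply.
print(marker.evaluate())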
diff --git a/test/units/template/test_templar.py b/test/units/template/test_templar.py
index 6747f76..02840e1 100644
--- a/test/units/template/test_templar.py
+++ b/test/units/template/test_templar.py
@@ -22,11 +22,10 @@ __metaclass__ = type
from jinja2.runtime import Context
from units.compat import unittest
-from unittest.mock import patch
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleUndefinedVariable
-from ansible.module_utils.six import string_types
+from ansible.plugins.loader import init_plugin_loader
from ansible.template import Templar, AnsibleContext, AnsibleEnvironment, AnsibleUndefined
from ansible.utils.unsafe_proxy import AnsibleUnsafe, wrap_var
from units.mock.loader import DictDataLoader
@@ -34,6 +33,7 @@ from units.mock.loader import DictDataLoader
class BaseTemplar(object):
def setUp(self):
+ init_plugin_loader()
self.test_vars = dict(
foo="bar",
bam="{{foo}}",
@@ -62,14 +62,6 @@ class BaseTemplar(object):
return self._ansible_context._is_unsafe(obj)
-# class used for testing arbitrary objects passed to template
-class SomeClass(object):
- foo = 'bar'
-
- def __init__(self):
- self.blip = 'blip'
-
-
class SomeUnsafeClass(AnsibleUnsafe):
def __init__(self):
super(SomeUnsafeClass, self).__init__()
@@ -266,8 +258,6 @@ class TestTemplarMisc(BaseTemplar, unittest.TestCase):
templar.available_variables = "foo=bam"
except AssertionError:
pass
- except Exception as e:
- self.fail(e)
def test_templar_escape_backslashes(self):
# Rule of thumb: If escape backslashes is True you should end up with
diff --git a/test/units/template/test_vars.py b/test/units/template/test_vars.py
index 514104f..f43cfac 100644
--- a/test/units/template/test_vars.py
+++ b/test/units/template/test_vars.py
@@ -19,23 +19,16 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from units.compat import unittest
-from unittest.mock import MagicMock
-
+from ansible.template import Templar
from ansible.template.vars import AnsibleJ2Vars
-class TestVars(unittest.TestCase):
- def setUp(self):
- self.mock_templar = MagicMock(name='mock_templar')
+def test_globals_empty():
+ assert isinstance(dict(AnsibleJ2Vars(Templar(None), {})), dict)
- def test_globals_empty(self):
- ajvars = AnsibleJ2Vars(self.mock_templar, {})
- res = dict(ajvars)
- self.assertIsInstance(res, dict)
- def test_globals(self):
- res = dict(AnsibleJ2Vars(self.mock_templar, {'foo': 'bar', 'blip': [1, 2, 3]}))
- self.assertIsInstance(res, dict)
- self.assertIn('foo', res)
- self.assertEqual(res['foo'], 'bar')
+def test_globals():
+ res = dict(AnsibleJ2Vars(Templar(None), {'foo': 'bar', 'blip': [1, 2, 3]}))
+ assert isinstance(res, dict)
+ assert 'foo' in res
+ assert res['foo'] == 'bar'
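The test_vars.py rewrite follows the usual unittest-to-pytest shape: drop the TestCase class, build real collaborators instead of a MagicMock templar, and replace self.assert* with bare asserts. A generic before/after sketch of that conversion, with illustrative names rather than anything from this patch:

import unittest
import pytest

# before: class-based, state prepared in setUp
class TestThing(unittest.TestCase):
    def setUp(self):
        self.data = {'foo': 'bar'}

    def test_foo_old_style(self):
        self.assertEqual(self.data['foo'], 'bar')

# after: a fixture replaces setUp, plain asserts replace self.assert*
@pytest.fixture
def data():
    return {'foo': 'bar'}

def test_foo_new_style(data):
    assert data['foo'] == 'bar'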
diff --git a/test/units/test_constants.py b/test/units/test_constants.py
deleted file mode 100644
index a206d23..0000000
--- a/test/units/test_constants.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# -*- coding: utf-8 -*-
-# (c) 2017 Toshio Kuratomi <tkuratomi@ansible.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import pwd
-import os
-
-import pytest
-
-from ansible import constants
-from ansible.module_utils.six import StringIO
-from ansible.module_utils.six.moves import configparser
-from ansible.module_utils._text import to_text
-
-
-@pytest.fixture
-def cfgparser():
- CFGDATA = StringIO("""
-[defaults]
-defaults_one = 'data_defaults_one'
-
-[level1]
-level1_one = 'data_level1_one'
- """)
- p = configparser.ConfigParser()
- p.readfp(CFGDATA)
- return p
-
-
-@pytest.fixture
-def user():
- user = {}
- user['uid'] = os.geteuid()
-
- pwd_entry = pwd.getpwuid(user['uid'])
- user['username'] = pwd_entry.pw_name
- user['home'] = pwd_entry.pw_dir
-
- return user
-
-
-@pytest.fixture
-def cfg_file():
- data = '/ansible/test/cfg/path'
- old_cfg_file = constants.CONFIG_FILE
- constants.CONFIG_FILE = os.path.join(data, 'ansible.cfg')
- yield data
-
- constants.CONFIG_FILE = old_cfg_file
-
-
-@pytest.fixture
-def null_cfg_file():
- old_cfg_file = constants.CONFIG_FILE
- del constants.CONFIG_FILE
- yield
-
- constants.CONFIG_FILE = old_cfg_file
-
-
-@pytest.fixture
-def cwd():
- data = '/ansible/test/cwd/'
- old_cwd = os.getcwd
- os.getcwd = lambda: data
-
- old_cwdu = None
- if hasattr(os, 'getcwdu'):
- old_cwdu = os.getcwdu
- os.getcwdu = lambda: to_text(data)
-
- yield data
-
- os.getcwd = old_cwd
- if hasattr(os, 'getcwdu'):
- os.getcwdu = old_cwdu
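Most of the deleted test_constants.py fixtures did their save-and-restore by hand (stashing constants.CONFIG_FILE or os.getcwd, yielding, then putting the original back). If any of them were ever revived, pytest's monkeypatch fixture performs the same bookkeeping automatically; a minimal sketch of the equivalent cwd fixture:

import os
import pytest

@pytest.fixture
def cwd(monkeypatch):
    # monkeypatch undoes the setattr at teardown, replacing the manual
    # old_cwd/old_cwdu juggling the deleted fixture performed.
    data = '/ansible/test/cwd/'
    monkeypatch.setattr(os, 'getcwd', lambda: data)
    return data

def test_cwd_is_patched(cwd):
    assert os.getcwd() == cwd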
diff --git a/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py b/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py
index 9d30580..a85f422 100644
--- a/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py
+++ b/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py
@@ -1,7 +1,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ..module_utils.my_util import question
+from ..module_utils.my_util import question # pylint: disable=unused-import
def action_code():
diff --git a/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_other_util.py b/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_other_util.py
index 35e1381..463b133 100644
--- a/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_other_util.py
+++ b/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_other_util.py
@@ -1,4 +1,4 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from .my_util import question
+from .my_util import question # pylint: disable=unused-import
diff --git a/test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll2/__init__.py b/test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll2/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll2/__init__.py
diff --git a/test/units/utils/collection_loader/test_collection_loader.py b/test/units/utils/collection_loader/test_collection_loader.py
index f7050dc..feaaf97 100644
--- a/test/units/utils/collection_loader/test_collection_loader.py
+++ b/test/units/utils/collection_loader/test_collection_loader.py
@@ -13,7 +13,7 @@ from ansible.modules import ping as ping_module
from ansible.utils.collection_loader import AnsibleCollectionConfig, AnsibleCollectionRef
from ansible.utils.collection_loader._collection_finder import (
_AnsibleCollectionFinder, _AnsibleCollectionLoader, _AnsibleCollectionNSPkgLoader, _AnsibleCollectionPkgLoader,
- _AnsibleCollectionPkgLoaderBase, _AnsibleCollectionRootPkgLoader, _AnsiblePathHookFinder,
+ _AnsibleCollectionPkgLoaderBase, _AnsibleCollectionRootPkgLoader, _AnsibleNSTraversable, _AnsiblePathHookFinder,
_get_collection_name_from_path, _get_collection_role_path, _get_collection_metadata, _iter_modules_impl
)
from ansible.utils.collection_loader._collection_config import _EventSource
@@ -29,8 +29,16 @@ def teardown(*args, **kwargs):
# BEGIN STANDALONE TESTS - these exercise behaviors of the individual components without the import machinery
-@pytest.mark.skipif(not PY3, reason='Testing Python 2 codepath (find_module) on Python 3')
-def test_find_module_py3():
+@pytest.mark.filterwarnings(
+ 'ignore:'
+ r'find_module\(\) is deprecated and slated for removal in Python 3\.12; use find_spec\(\) instead'
+ ':DeprecationWarning',
+ 'ignore:'
+ r'FileFinder\.find_loader\(\) is deprecated and slated for removal in Python 3\.12; use find_spec\(\) instead'
+ ':DeprecationWarning',
+)
+@pytest.mark.skipif(not PY3 or sys.version_info >= (3, 12), reason='Testing Python 2 codepath (find_module) on Python 3, <= 3.11')
+def test_find_module_py3_lt_312():
dir_to_a_file = os.path.dirname(ping_module.__file__)
path_hook_finder = _AnsiblePathHookFinder(_AnsibleCollectionFinder(), dir_to_a_file)
@@ -40,6 +48,16 @@ def test_find_module_py3():
assert path_hook_finder.find_module('missing') is None
+@pytest.mark.skipif(sys.version_info < (3, 12), reason='Testing Python 2 codepath (find_module) on Python >= 3.12')
+def test_find_module_py3_gt_311():
+ dir_to_a_file = os.path.dirname(ping_module.__file__)
+ path_hook_finder = _AnsiblePathHookFinder(_AnsibleCollectionFinder(), dir_to_a_file)
+
+ # setuptools may fall back to find_module on Python 3 if find_spec returns None
+ # see https://github.com/pypa/setuptools/pull/2918
+ assert path_hook_finder.find_spec('missing') is None
+
+
def test_finder_setup():
# ensure scalar path is listified
f = _AnsibleCollectionFinder(paths='/bogus/bogus')
@@ -828,6 +846,53 @@ def test_collectionref_components_invalid(name, subdirs, resource, ref_type, exp
assert re.search(expected_error_expression, str(curerr.value))
+@pytest.mark.skipif(not PY3, reason='importlib.resources only supported for py3')
+def test_importlib_resources():
+ if sys.version_info < (3, 10):
+ from importlib_resources import files
+ else:
+ from importlib.resources import files
+ from pathlib import Path
+
+ f = get_default_finder()
+ reset_collections_loader_state(f)
+
+ ansible_collections_ns = files('ansible_collections')
+ ansible_ns = files('ansible_collections.ansible')
+ testns = files('ansible_collections.testns')
+ testcoll = files('ansible_collections.testns.testcoll')
+ testcoll2 = files('ansible_collections.testns.testcoll2')
+ module_utils = files('ansible_collections.testns.testcoll.plugins.module_utils')
+
+ assert isinstance(ansible_collections_ns, _AnsibleNSTraversable)
+ assert isinstance(ansible_ns, _AnsibleNSTraversable)
+ assert isinstance(testcoll, Path)
+ assert isinstance(module_utils, Path)
+
+ assert ansible_collections_ns.is_dir()
+ assert ansible_ns.is_dir()
+ assert testcoll.is_dir()
+ assert module_utils.is_dir()
+
+ first_path = Path(default_test_collection_paths[0])
+ second_path = Path(default_test_collection_paths[1])
+ testns_paths = []
+ ansible_ns_paths = []
+ for path in default_test_collection_paths[:2]:
+ ansible_ns_paths.append(Path(path) / 'ansible_collections' / 'ansible')
+ testns_paths.append(Path(path) / 'ansible_collections' / 'testns')
+
+ assert testns._paths == testns_paths
+ # NOTE: The next two asserts check for subsets to accommodate running the unit tests when externally installed collections are available.
+ assert set(ansible_ns_paths).issubset(ansible_ns._paths)
+ assert set(Path(p) / 'ansible_collections' for p in default_test_collection_paths[:2]).issubset(ansible_collections_ns._paths)
+ assert testcoll2 == second_path / 'ansible_collections' / 'testns' / 'testcoll2'
+
+ assert {p.name for p in module_utils.glob('*.py')} == {'__init__.py', 'my_other_util.py', 'my_util.py'}
+ nestcoll_mu_init = first_path / 'ansible_collections' / 'testns' / 'testcoll' / 'plugins' / 'module_utils' / '__init__.py'
+ assert next(module_utils.glob('__init__.py')) == nestcoll_mu_init
+
+
# BEGIN TEST SUPPORT
default_test_collection_paths = [
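The new test_importlib_resources case hinges on two things: a conditional import that uses the importlib_resources backport below Python 3.10 and the stdlib otherwise, and the fact that files() returns a Traversable that behaves like a pathlib.Path for on-disk packages. A standalone sketch of both, run against a stdlib package so it does not depend on the collection loader fixtures:

import sys

# Same version split the test uses: stdlib on 3.10+, backport otherwise.
if sys.version_info < (3, 10):
    from importlib_resources import files  # third-party backport
else:
    from importlib.resources import files

pkg = files('email')  # any importable on-disk package works for the demo
assert pkg.is_dir()
# iterdir() is part of the Traversable protocol, so this also works for
# namespace packages spread across several paths.
print(sorted(entry.name for entry in pkg.iterdir())[:3])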
diff --git a/test/units/utils/display/test_broken_cowsay.py b/test/units/utils/display/test_broken_cowsay.py
index d888010..96157e1 100644
--- a/test/units/utils/display/test_broken_cowsay.py
+++ b/test/units/utils/display/test_broken_cowsay.py
@@ -12,16 +12,13 @@ from unittest.mock import MagicMock
def test_display_with_fake_cowsay_binary(capsys, mocker):
- mocker.patch("ansible.constants.ANSIBLE_COW_PATH", "./cowsay.sh")
+ display = Display()
- def mock_communicate(input=None, timeout=None):
- return b"", b""
+ mocker.patch("ansible.constants.ANSIBLE_COW_PATH", "./cowsay.sh")
mock_popen = MagicMock()
- mock_popen.return_value.communicate = mock_communicate
mock_popen.return_value.returncode = 1
mocker.patch("subprocess.Popen", mock_popen)
- display = Display()
assert not hasattr(display, "cows_available")
assert display.b_cowsay is None
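The reordered cowsay test drops the hand-written communicate stub because MagicMock fabricates one on first access; only the attributes the code under test actually inspects need explicit values. A small sketch of that Popen-stubbing style, with an illustrative command line:

import subprocess
from unittest.mock import MagicMock, patch

mock_popen = MagicMock()
mock_popen.return_value.communicate.return_value = (b'', b'moo error')
mock_popen.return_value.returncode = 1

with patch('subprocess.Popen', mock_popen):
    proc = subprocess.Popen(['./cowsay.sh', '-l'])  # returns the mock instance
    out, err = proc.communicate()
    assert proc.returncode == 1 and err == b'moo error'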
diff --git a/test/units/plugins/action/test_pause.py b/test/units/utils/display/test_curses.py
index 8ad6db7..05efc41 100644
--- a/test/units/plugins/action/test_pause.py
+++ b/test/units/utils/display/test_curses.py
@@ -11,16 +11,14 @@ import io
import pytest
import sys
-from ansible.plugins.action import pause # noqa: F401
-from ansible.module_utils.six import PY2
+import ansible.utils.display # make available for monkeypatch
+assert ansible.utils.display # avoid reporting as unused
builtin_import = 'builtins.__import__'
-if PY2:
- builtin_import = '__builtin__.__import__'
def test_pause_curses_tigetstr_none(mocker, monkeypatch):
- monkeypatch.delitem(sys.modules, 'ansible.plugins.action.pause')
+ monkeypatch.delitem(sys.modules, 'ansible.utils.display')
dunder_import = __import__
@@ -35,7 +33,11 @@ def test_pause_curses_tigetstr_none(mocker, monkeypatch):
mocker.patch(builtin_import, _import)
- mod = importlib.import_module('ansible.plugins.action.pause')
+ mod = importlib.import_module('ansible.utils.display')
+
+ assert mod.HAS_CURSES is True
+
+ mod.setupterm()
assert mod.HAS_CURSES is True
assert mod.MOVE_TO_BOL == b'\r'
@@ -43,7 +45,7 @@ def test_pause_curses_tigetstr_none(mocker, monkeypatch):
def test_pause_missing_curses(mocker, monkeypatch):
- monkeypatch.delitem(sys.modules, 'ansible.plugins.action.pause')
+ monkeypatch.delitem(sys.modules, 'ansible.utils.display')
dunder_import = __import__
@@ -55,10 +57,12 @@ def test_pause_missing_curses(mocker, monkeypatch):
mocker.patch(builtin_import, _import)
- mod = importlib.import_module('ansible.plugins.action.pause')
+ mod = importlib.import_module('ansible.utils.display')
+
+ assert mod.HAS_CURSES is False
with pytest.raises(AttributeError):
- mod.curses
+ assert mod.curses
assert mod.HAS_CURSES is False
assert mod.MOVE_TO_BOL == b'\r'
@@ -67,7 +71,7 @@ def test_pause_missing_curses(mocker, monkeypatch):
@pytest.mark.parametrize('exc', (curses.error, TypeError, io.UnsupportedOperation))
def test_pause_curses_setupterm_error(mocker, monkeypatch, exc):
- monkeypatch.delitem(sys.modules, 'ansible.plugins.action.pause')
+ monkeypatch.delitem(sys.modules, 'ansible.utils.display')
dunder_import = __import__
@@ -82,7 +86,11 @@ def test_pause_curses_setupterm_error(mocker, monkeypatch, exc):
mocker.patch(builtin_import, _import)
- mod = importlib.import_module('ansible.plugins.action.pause')
+ mod = importlib.import_module('ansible.utils.display')
+
+ assert mod.HAS_CURSES is True
+
+ mod.setupterm()
assert mod.HAS_CURSES is False
assert mod.MOVE_TO_BOL == b'\r'
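All three relocated curses tests use the same recipe: evict the module under test from sys.modules, make __import__ raise for one dependency, then re-import so the module-level try/except runs again under the new conditions. A self-contained miniature of that recipe, exercising a throwaway module written to tmp_path instead of ansible.utils.display (requires pytest-mock, as the real tests do):

import importlib
import sys

def test_optional_import_missing(tmp_path, mocker, monkeypatch):
    # A throwaway module with the same optional-import guard shape.
    (tmp_path / 'mymod.py').write_text(
        'try:\n'
        '    import curses\n'
        '    HAS_CURSES = True\n'
        'except ImportError:\n'
        '    HAS_CURSES = False\n'
    )
    monkeypatch.syspath_prepend(str(tmp_path))
    monkeypatch.delitem(sys.modules, 'mymod', raising=False)

    real_import = __import__

    def _import(name, *args, **kwargs):
        if name == 'curses':
            raise ImportError('simulated missing curses')
        return real_import(name, *args, **kwargs)

    # The patched builtin is what the 'import curses' statement inside
    # mymod.py ends up calling during the re-import below.
    mocker.patch('builtins.__import__', _import)
    mod = importlib.import_module('mymod')
    assert mod.HAS_CURSES is False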
diff --git a/test/units/utils/test_cleanup_tmp_file.py b/test/units/utils/test_cleanup_tmp_file.py
index 2a44a55..35374f4 100644
--- a/test/units/utils/test_cleanup_tmp_file.py
+++ b/test/units/utils/test_cleanup_tmp_file.py
@@ -6,16 +6,11 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
-import pytest
import tempfile
from ansible.utils.path import cleanup_tmp_file
-def raise_error():
- raise OSError
-
-
def test_cleanup_tmp_file_file():
tmp_fd, tmp = tempfile.mkstemp()
cleanup_tmp_file(tmp)
@@ -34,15 +29,21 @@ def test_cleanup_tmp_file_nonexistant():
assert None is cleanup_tmp_file('nope')
-def test_cleanup_tmp_file_failure(mocker):
+def test_cleanup_tmp_file_failure(mocker, capsys):
tmp = tempfile.mkdtemp()
- with pytest.raises(Exception):
- mocker.patch('shutil.rmtree', side_effect=raise_error())
- cleanup_tmp_file(tmp)
+ rmtree = mocker.patch('shutil.rmtree', side_effect=OSError('test induced failure'))
+ cleanup_tmp_file(tmp)
+ out, err = capsys.readouterr()
+ assert out == ''
+ assert err == ''
+ rmtree.assert_called_once()
def test_cleanup_tmp_file_failure_warning(mocker, capsys):
tmp = tempfile.mkdtemp()
- with pytest.raises(Exception):
- mocker.patch('shutil.rmtree', side_effect=raise_error())
- cleanup_tmp_file(tmp, warn=True)
+ rmtree = mocker.patch('shutil.rmtree', side_effect=OSError('test induced failure'))
+ cleanup_tmp_file(tmp, warn=True)
+ out, err = capsys.readouterr()
+ assert out == 'Unable to remove temporary file test induced failure\n'
+ assert err == ''
+ rmtree.assert_called_once()
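The rewritten cleanup_tmp_file failure tests combine two pytest pieces: mocker.patch(..., side_effect=OSError(...)) to force the failure, and capsys to prove what, if anything, reached stdout and stderr. A stripped-down sketch of that combination against a hypothetical remove_quietly helper:

import shutil

def remove_quietly(path):
    # Hypothetical stand-in for the code under test: swallow errors silently.
    try:
        shutil.rmtree(path)
    except Exception:
        pass

def test_remove_quietly_swallows_errors(mocker, capsys):
    rmtree = mocker.patch('shutil.rmtree', side_effect=OSError('boom'))
    remove_quietly('/does/not/matter')
    out, err = capsys.readouterr()
    assert out == '' and err == ''
    rmtree.assert_called_once()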
diff --git a/test/units/utils/test_display.py b/test/units/utils/test_display.py
index 6b1914b..80b7a09 100644
--- a/test/units/utils/test_display.py
+++ b/test/units/utils/test_display.py
@@ -18,16 +18,14 @@ from ansible.utils.multiprocessing import context as multiprocessing_context
@pytest.fixture
def problematic_wcswidth_chars():
- problematic = []
- try:
- locale.setlocale(locale.LC_ALL, 'C.UTF-8')
- except Exception:
- return problematic
+ locale.setlocale(locale.LC_ALL, 'C.UTF-8')
candidates = set(chr(c) for c in range(sys.maxunicode) if unicodedata.category(chr(c)) == 'Cf')
- for c in candidates:
- if _LIBC.wcswidth(c, _MAX_INT) == -1:
- problematic.append(c)
+ problematic = [candidate for candidate in candidates if _LIBC.wcswidth(candidate, _MAX_INT) == -1]
+
+ if not problematic:
+ # Newer distributions (Ubuntu 22.04, Fedora 38) include a libc which does not report problematic characters.
+ pytest.skip("no problematic wcswidth chars found") # pragma: nocover
return problematic
@@ -54,9 +52,6 @@ def test_get_text_width():
def test_get_text_width_no_locale(problematic_wcswidth_chars):
- if not problematic_wcswidth_chars:
- pytest.skip("No problmatic wcswidth chars")
- locale.setlocale(locale.LC_ALL, 'C.UTF-8')
pytest.raises(EnvironmentError, get_text_width, problematic_wcswidth_chars[0])
@@ -108,9 +103,21 @@ def test_Display_display_fork():
display = Display()
display.set_queue(queue)
display.display('foo')
- queue.send_display.assert_called_once_with(
- 'foo', color=None, stderr=False, screen_only=False, log_only=False, newline=True
- )
+ queue.send_display.assert_called_once_with('display', 'foo')
+
+ p = multiprocessing_context.Process(target=test)
+ p.start()
+ p.join()
+ assert p.exitcode == 0
+
+
+def test_Display_display_warn_fork():
+ def test():
+ queue = MagicMock()
+ display = Display()
+ display.set_queue(queue)
+ display.warning('foo')
+ queue.send_display.assert_called_once_with('warning', 'foo')
p = multiprocessing_context.Process(target=test)
p.start()
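Both fork tests rely on the same trick: the assertions run inside a child process, and the parent only checks the exit code, because an uncaught AssertionError in the child surfaces as a non-zero exitcode. A stripped-down sketch using the stdlib directly (the 'fork' start method mirrors what Ansible's multiprocessing_context appears to use, and is POSIX-only):

import multiprocessing

def _child():
    # Any failing assert raises here, the child exits non-zero, and the
    # parent test fails on the exitcode check below.
    assert 1 + 1 == 2

def test_assertions_run_in_child_process():
    ctx = multiprocessing.get_context('fork')
    proc = ctx.Process(target=_child)
    proc.start()
    proc.join()
    assert proc.exitcode == 0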
diff --git a/test/units/utils/test_encrypt.py b/test/units/utils/test_encrypt.py
index 72fe3b0..be32579 100644
--- a/test/units/utils/test_encrypt.py
+++ b/test/units/utils/test_encrypt.py
@@ -27,17 +27,26 @@ class passlib_off(object):
def assert_hash(expected, secret, algorithm, **settings):
+ assert encrypt.do_encrypt(secret, algorithm, **settings) == expected
if encrypt.PASSLIB_AVAILABLE:
- assert encrypt.passlib_or_crypt(secret, algorithm, **settings) == expected
assert encrypt.PasslibHash(algorithm).hash(secret, **settings) == expected
else:
- assert encrypt.passlib_or_crypt(secret, algorithm, **settings) == expected
with pytest.raises(AnsibleError) as excinfo:
encrypt.PasslibHash(algorithm).hash(secret, **settings)
assert excinfo.value.args[0] == "passlib must be installed and usable to hash with '%s'" % algorithm
@pytest.mark.skipif(sys.platform.startswith('darwin'), reason='macOS requires passlib')
+def test_passlib_or_crypt():
+ with passlib_off():
+ expected = "$5$rounds=5000$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7"
+ assert encrypt.passlib_or_crypt("123", "sha256_crypt", salt="12345678", rounds=5000) == expected
+
+ expected = "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7"
+ assert encrypt.passlib_or_crypt("123", "sha256_crypt", salt="12345678", rounds=5000) == expected
+
+
+@pytest.mark.skipif(sys.platform.startswith('darwin'), reason='macOS requires passlib')
def test_encrypt_with_rounds_no_passlib():
with passlib_off():
assert_hash("$5$rounds=5000$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7",
diff --git a/test/units/utils/test_unsafe_proxy.py b/test/units/utils/test_unsafe_proxy.py
index ea653cf..55f1b6d 100644
--- a/test/units/utils/test_unsafe_proxy.py
+++ b/test/units/utils/test_unsafe_proxy.py
@@ -5,7 +5,9 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-from ansible.module_utils.six import PY3
+import pathlib
+import sys
+
from ansible.utils.unsafe_proxy import AnsibleUnsafe, AnsibleUnsafeBytes, AnsibleUnsafeText, wrap_var
from ansible.module_utils.common.text.converters import to_text, to_bytes
@@ -19,10 +21,7 @@ def test_wrap_var_bytes():
def test_wrap_var_string():
- if PY3:
- assert isinstance(wrap_var('foo'), AnsibleUnsafeText)
- else:
- assert isinstance(wrap_var('foo'), AnsibleUnsafeBytes)
+ assert isinstance(wrap_var('foo'), AnsibleUnsafeText)
def test_wrap_var_dict():
@@ -95,12 +94,12 @@ def test_wrap_var_no_ref():
'text': 'text',
}
wrapped_thing = wrap_var(thing)
- thing is not wrapped_thing
- thing['foo'] is not wrapped_thing['foo']
- thing['bar'][0] is not wrapped_thing['bar'][0]
- thing['baz'][0] is not wrapped_thing['baz'][0]
- thing['none'] is not wrapped_thing['none']
- thing['text'] is not wrapped_thing['text']
+ assert thing is not wrapped_thing
+ assert thing['foo'] is not wrapped_thing['foo']
+ assert thing['bar'][0] is not wrapped_thing['bar'][0]
+ assert thing['baz'][0] is not wrapped_thing['baz'][0]
+ assert thing['none'] is wrapped_thing['none']
+ assert thing['text'] is not wrapped_thing['text']
def test_AnsibleUnsafeText():
@@ -119,3 +118,10 @@ def test_to_text_unsafe():
def test_to_bytes_unsafe():
assert isinstance(to_bytes(AnsibleUnsafeText(u'foo')), AnsibleUnsafeBytes)
assert to_bytes(AnsibleUnsafeText(u'foo')) == AnsibleUnsafeBytes(b'foo')
+
+
+def test_unsafe_with_sys_intern():
+    # This is really about sys.intern; pathlib is exercised because it is a
+    # specific affected use.
+ assert sys.intern(AnsibleUnsafeText('foo')) == 'foo'
+ assert pathlib.Path(AnsibleUnsafeText('/tmp')) == pathlib.Path('/tmp')
diff --git a/test/units/vars/test_module_response_deepcopy.py b/test/units/vars/test_module_response_deepcopy.py
index 78f9de0..3313dea 100644
--- a/test/units/vars/test_module_response_deepcopy.py
+++ b/test/units/vars/test_module_response_deepcopy.py
@@ -7,8 +7,6 @@ __metaclass__ = type
from ansible.vars.clean import module_response_deepcopy
-import pytest
-
def test_module_response_deepcopy_basic():
x = 42
@@ -37,15 +35,6 @@ def test_module_response_deepcopy_empty_tuple():
assert x is y
-@pytest.mark.skip(reason='No current support for this situation')
-def test_module_response_deepcopy_tuple():
- x = ([1, 2], 3)
- y = module_response_deepcopy(x)
- assert y == x
- assert x is not y
- assert x[0] is not y[0]
-
-
def test_module_response_deepcopy_tuple_of_immutables():
x = ((1, 2), 3)
y = module_response_deepcopy(x)
diff --git a/test/units/vars/test_variable_manager.py b/test/units/vars/test_variable_manager.py
index 67ec120..ee6de81 100644
--- a/test/units/vars/test_variable_manager.py
+++ b/test/units/vars/test_variable_manager.py
@@ -141,10 +141,8 @@ class TestVariableManager(unittest.TestCase):
return
# pylint: disable=unreachable
- '''
- Tests complex variations and combinations of get_vars() with different
- objects to modify the context under which variables are merged.
- '''
+ # Tests complex variations and combinations of get_vars() with different
+ # objects to modify the context under which variables are merged.
# FIXME: BCS makethiswork
# return True