Diffstat (limited to 'src/ansiblelint')
150 files changed, 24009 insertions, 0 deletions
diff --git a/src/ansiblelint/__init__.py b/src/ansiblelint/__init__.py new file mode 100644 index 0000000..0bd408f --- /dev/null +++ b/src/ansiblelint/__init__.py @@ -0,0 +1,29 @@ +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +"""Main ansible-lint package.""" +from __future__ import annotations + +import ansiblelint._vendor +from ansiblelint.version import __version__ + +# make vendored top-level modules accessible EARLY + + +__all__ = ("__version__",) diff --git a/src/ansiblelint/__main__.py b/src/ansiblelint/__main__.py new file mode 100755 index 0000000..0437bc0 --- /dev/null +++ b/src/ansiblelint/__main__.py @@ -0,0 +1,468 @@ +#!/usr/bin/env python +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+"""Command line implementation.""" + +from __future__ import annotations + +import errno +import logging +import os +import pathlib +import shutil +import site +import subprocess +import sys +from contextlib import contextmanager +from typing import TYPE_CHECKING, Any, Callable, Iterator, TextIO + +from ansible_compat.config import ansible_version +from ansible_compat.prerun import get_cache_dir +from filelock import FileLock, Timeout + +from ansiblelint import cli +from ansiblelint._mockings import _perform_mockings_cleanup +from ansiblelint.app import get_app +from ansiblelint.color import ( + console, + console_options, + console_stderr, + reconfigure, + render_yaml, +) +from ansiblelint.config import get_version_warning, options +from ansiblelint.constants import EXIT_CONTROL_C_RC, GIT_CMD, LOCK_TIMEOUT_RC +from ansiblelint.file_utils import abspath, cwd, normpath +from ansiblelint.loaders import load_ignore_txt +from ansiblelint.skip_utils import normalize_tag +from ansiblelint.version import __version__ + +if TYPE_CHECKING: + from argparse import Namespace + + # RulesCollection must be imported lazily or ansible gets imported too early. + from ansiblelint.rules import RulesCollection + from ansiblelint.runner import LintResult + + +_logger = logging.getLogger(__name__) + + +def initialize_logger(level: int = 0) -> None: + """Set up the global logging level based on the verbosity number.""" + # We are about to act on the root logger, which defaults to logging.WARNING. + # That is where our 0 (default) value comes from. + verbosity_map = { + -2: logging.CRITICAL, + -1: logging.ERROR, + 0: logging.WARNING, + 1: logging.INFO, + 2: logging.DEBUG, + } + + handler = logging.StreamHandler() + formatter = logging.Formatter("%(levelname)-8s %(message)s") + handler.setFormatter(formatter) + logger = logging.getLogger() + logger.addHandler(handler) + # Unknown logging level is treated as DEBUG + logging_level = verbosity_map.get(level, logging.DEBUG) + logger.setLevel(logging_level) + logging.captureWarnings(True) # pass all warnings.warn() messages through logging + # Use module-level _logger instance to validate it + _logger.debug("Logging initialized to level %s", logging_level) + + +def initialize_options(arguments: list[str] | None = None) -> None: + """Load config options and store them inside options module.""" + new_options = cli.get_config(arguments or []) + new_options.cwd = pathlib.Path.cwd() + + if new_options.colored is None: + new_options.colored = should_do_markup() + + # persist loaded configuration inside options module + for k, v in new_options.__dict__.items(): + setattr(options, k, v) + + # rename deprecated ids/tags to newer names + options.tags = [normalize_tag(tag) for tag in options.tags] + options.skip_list = [normalize_tag(tag) for tag in options.skip_list] + options.warn_list = [normalize_tag(tag) for tag in options.warn_list] + + options.configured = True + options.cache_dir = get_cache_dir(options.project_dir) + + # add a lock file so we do not have two instances running inside at the same time + os.makedirs(options.cache_dir, exist_ok=True) + + options.cache_dir_lock = None + if not options.offline: # pragma: no cover + options.cache_dir_lock = FileLock(f"{options.cache_dir}/.lock") + try: + options.cache_dir_lock.acquire(timeout=180) + except Timeout: # pragma: no cover + _logger.error( + "Timeout waiting for another instance of ansible-lint to release the lock." 
+ ) + sys.exit(LOCK_TIMEOUT_RC) + + # Avoid extra output noise from Ansible about using devel versions + if "ANSIBLE_DEVEL_WARNING" not in os.environ: # pragma: no branch + os.environ["ANSIBLE_DEVEL_WARNING"] = "false" + + +def _do_list(rules: RulesCollection) -> int: + # On purpose lazy-imports to avoid pre-loading Ansible + # pylint: disable=import-outside-toplevel + from ansiblelint.generate_docs import ( + rules_as_docs, + rules_as_md, + rules_as_rich, + rules_as_str, + ) + + if options.list_rules: + _rule_format_map: dict[str, Callable[..., Any]] = { + "brief": rules_as_str, + "full": rules_as_rich, + "md": rules_as_md, + "docs": rules_as_docs, + } + + console.print( + _rule_format_map.get(options.format, rules_as_str)(rules), highlight=False + ) + return 0 + + if options.list_tags: + console.print(render_yaml(rules.list_tags())) + return 0 + + # we should not get here! + return 1 + + +# noinspection PyShadowingNames +def _do_transform(result: LintResult, opts: Namespace) -> None: + """Create and run Transformer.""" + if "yaml" in opts.skip_list: + # The transformer rewrites yaml files, but the user requested to skip + # the yaml rule or anything tagged with "yaml", so there is nothing to do. + return + + # On purpose lazy-imports to avoid loading transforms unless requested + # pylint: disable=import-outside-toplevel + from ansiblelint.transformer import Transformer + + transformer = Transformer(result, options) + + # this will mark any matches as fixed if the transforms repaired the issue + transformer.run() + + +def support_banner() -> None: + """Display support banner when running on unsupported platform.""" + if sys.version_info < (3, 9, 0): # pragma: no cover + prefix = "::warning::" if "GITHUB_ACTION" in os.environ else "WARNING: " + console_stderr.print( + f"{prefix}ansible-lint is no longer tested under Python {sys.version_info.major}.{sys.version_info.minor} and will soon require 3.9. 
Do not report bugs for this version.", + style="bold red", + ) + + +# pylint: disable=too-many-branches,too-many-statements +def main(argv: list[str] | None = None) -> int: # noqa: C901 + """Linter CLI entry point.""" + # alter PATH if needed (venv support) + path_inject() + + if argv is None: # pragma: no cover + argv = sys.argv + initialize_options(argv[1:]) + + console_options["force_terminal"] = options.colored + reconfigure(console_options) + + if options.version: + console.print( + f"ansible-lint [repr.number]{__version__}[/] using ansible [repr.number]{ansible_version()}[/]" + ) + msg = get_version_warning() + if msg: + console.print(msg) + support_banner() + sys.exit(0) + else: + support_banner() + + initialize_logger(options.verbosity) + _logger.debug("Options: %s", options) + _logger.debug(os.getcwd()) + + if options.progressive: + _logger.warning( + "Progressive mode is deprecated and will be removed in next major version, use ignore files instead: https://ansible-lint.readthedocs.io/configuring/#ignoring-rules-for-entire-files" + ) + + if not options.offline: + # pylint: disable=import-outside-toplevel + from ansiblelint.schemas import refresh_schemas + + refresh_schemas() + + # pylint: disable=import-outside-toplevel + from ansiblelint.rules import RulesCollection + from ansiblelint.runner import _get_matches + + rules = RulesCollection(options.rulesdirs, profile_name=options.profile) + + if options.list_profiles: + from ansiblelint.generate_docs import profiles_as_rich + + console.print(profiles_as_rich()) + return 0 + + if options.list_rules or options.list_tags: + return _do_list(rules) + + app = get_app() + if isinstance(options.tags, str): + options.tags = options.tags.split(",") # pragma: no cover + result = _get_matches(rules, options) + + if options.write_list: + _do_transform(result, options) + + mark_as_success = True + if result.matches and options.progressive: + mark_as_success = False + _logger.info( + "Matches found, running again on previous revision in order to detect regressions" + ) + with _previous_revision(): + _logger.debug("Options: %s", options) + _logger.debug(os.getcwd()) + old_result = _get_matches(rules, options) + # remove old matches from current list + matches_delta = list(set(result.matches) - set(old_result.matches)) + if len(matches_delta) == 0: + _logger.warning( + "Total violations not increased since previous " + "commit, will mark result as success. 
(%s -> %s)", + len(old_result.matches), + len(matches_delta), + ) + mark_as_success = True + + ignored = 0 + for match in result.matches: + # if match is not new, mark is as ignored + if match not in matches_delta: + match.ignored = True + ignored += 1 + if ignored: + _logger.warning( + "Marked %s previously known violation(s) as ignored due to" + " progressive mode.", + ignored, + ) + + if options.strict and result.matches: + mark_as_success = False + + # Remove skip_list items from the result + result.matches = [m for m in result.matches if m.tag not in app.options.skip_list] + # Mark matches as ignored inside ignore file + ignore_map = load_ignore_txt() + for match in result.matches: + if match.tag in ignore_map[match.filename]: + match.ignored = True + + app.render_matches(result.matches) + + _perform_mockings_cleanup() + if options.cache_dir_lock: + options.cache_dir_lock.release() + pathlib.Path(options.cache_dir_lock.lock_file).unlink(missing_ok=True) + if options.mock_filters: + _logger.warning( + "The following filters were mocked during the run: %s", + ",".join(options.mock_filters), + ) + + return app.report_outcome(result, mark_as_success=mark_as_success) + + +@contextmanager +def _previous_revision() -> Iterator[None]: + """Create or update a temporary workdir containing the previous revision.""" + worktree_dir = f"{options.cache_dir}/old-rev" + # Update options.exclude_paths to include use the temporary workdir. + rel_exclude_paths = [normpath(p) for p in options.exclude_paths] + options.exclude_paths = [abspath(p, worktree_dir) for p in rel_exclude_paths] + revision = subprocess.run( + [*GIT_CMD, "rev-parse", "HEAD^1"], + check=True, + text=True, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ).stdout.strip() + _logger.info("Previous revision SHA: %s", revision) + path = pathlib.Path(worktree_dir) + if path.exists(): + shutil.rmtree(worktree_dir) + path.mkdir(parents=True, exist_ok=True) + # Run check will fail if worktree_dir already exists + # pylint: disable=subprocess-run-check + subprocess.run( + [*GIT_CMD, "worktree", "add", "-f", worktree_dir], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + try: + with cwd(worktree_dir): + subprocess.run( + [*GIT_CMD, "checkout", revision], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + check=True, + ) + yield + finally: + options.exclude_paths = [abspath(p, os.getcwd()) for p in rel_exclude_paths] + + +def _run_cli_entrypoint() -> None: + """Invoke the main entrypoint with current CLI args. + + This function also processes the runtime exceptions. + """ + try: + sys.exit(main(sys.argv)) + except OSError as exc: + # NOTE: Only "broken pipe" is acceptable to ignore + if exc.errno != errno.EPIPE: # pragma: no cover + raise + except KeyboardInterrupt: # pragma: no cover + sys.exit(EXIT_CONTROL_C_RC) + except RuntimeError as exc: # pragma: no cover + raise SystemExit(exc) from exc + + +def path_inject() -> None: + """Add python interpreter path to top of PATH to fix outside venv calling.""" + # This make it possible to call ansible-lint that was installed inside a + # virtualenv without having to pre-activate it. Otherwise subprocess will + # either fail to find ansible executables or call the wrong ones. + # + # This must be run before we do run any subprocesses, and loading config + # does this as part of the ansible detection. 
+ paths = [x for x in os.environ.get("PATH", "").split(os.pathsep) if x] + + # Expand ~ in PATH as it known to break many tools + expanded = False + for idx, path in enumerate(paths): + if "~" in path: # pragma: no cover + paths[idx] = os.path.expanduser(path) + expanded = True + if expanded: # pragma: no cover + # flake8: noqa: T201 + print( + "WARNING: PATH altered to expand ~ in it. Read https://stackoverflow.com/a/44704799/99834 and correct your system configuration.", + file=sys.stderr, + ) + + inject_paths = [] + + userbase_bin_path = f"{site.getuserbase()}/bin" + if userbase_bin_path not in paths and os.path.exists( + f"{userbase_bin_path}/bin/ansible" + ): + inject_paths.append(userbase_bin_path) + + py_path = os.path.dirname(sys.executable) + if py_path not in paths and os.path.exists(f"{py_path}/ansible"): + inject_paths.append(py_path) + + if inject_paths: + # flake8: noqa: T201 + print( + f"WARNING: PATH altered to include {', '.join(inject_paths)} :: This is usually a sign of broken local setup, which can cause unexpected behaviors.", + file=sys.stderr, + ) + if inject_paths or expanded: + os.environ["PATH"] = os.pathsep.join([*inject_paths, *paths]) + + # We do know that finding ansible in PATH does not guarantee that it is + # functioning or that is in fact the same version that was installed as + # our dependency, but addressing this would be done by ansible-compat. + for cmd in ("ansible", "git"): + if not shutil.which(cmd): + raise RuntimeError(f"Failed to find runtime dependency '{cmd}' in PATH") + + +# Based on Ansible implementation +def to_bool(value: Any) -> bool: # pragma: no cover + """Return a bool for the arg.""" + if value is None or isinstance(value, bool): + return bool(value) + if isinstance(value, str): + value = value.lower() + if value in ("yes", "on", "1", "true", 1): + return True + return False + + +def should_do_markup(stream: TextIO = sys.stdout) -> bool: # pragma: no cover + """Decide about use of ANSI colors.""" + py_colors = None + + # https://xkcd.com/927/ + for env_var in ["PY_COLORS", "CLICOLOR", "FORCE_COLOR", "ANSIBLE_FORCE_COLOR"]: + value = os.environ.get(env_var, None) + if value is not None: + py_colors = to_bool(value) + break + + # If deliberately disabled colors + if os.environ.get("NO_COLOR", None): + return False + + # User configuration requested colors + if py_colors is not None: + return to_bool(py_colors) + + term = os.environ.get("TERM", "") + if "xterm" in term: + return True + + if term == "dumb": + return False + + # Use tty detection logic as last resort because there are numerous + # factors that can make isatty return a misleading value, including: + # - stdin.isatty() is the only one returning true, even on a real terminal + # - stderr returning false if user user uses a error stream coloring solution + return stream.isatty() + + +if __name__ == "__main__": + _run_cli_entrypoint() diff --git a/src/ansiblelint/_internal/__init__.py b/src/ansiblelint/_internal/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/src/ansiblelint/_internal/__init__.py diff --git a/src/ansiblelint/_internal/internal_error.md b/src/ansiblelint/_internal/internal_error.md new file mode 100644 index 0000000..8db5e5e --- /dev/null +++ b/src/ansiblelint/_internal/internal_error.md @@ -0,0 +1,43 @@ +# internal-error + +This error can also be caused by internal bugs but also by custom rules. +Instead of just stopping tool execution, we generate the errors and continue +processing other files. 
This allows users to add this rule to their `warn_list` +until the root cause is fixed. + +Keep in mind that once an `internal-error` is found on a specific file, no +other rules will be executed on that same file. + +In almost all cases you will see more detailed information regarding the +original error or runtime exception that triggered this rule. + +If these files are broken on purpose, like some test fixtures, you need to add +them to the `exclude_paths`. + +## Problematic code + +```yaml +--- +- name: Some title {{ # <-- Ansible will not load this invalid jinja template + hosts: localhost + tasks: [] +``` + +## Correct code + +```yaml +--- +- name: Some title + hosts: localhost + tasks: [] +``` + +## ERROR! No hosts matched the subscripted pattern + +If you see this error, it means that you tried to index a host group variable +that is using an index above its size. + +Instead of doing something like `hosts: all[1]`, which assumes that you have +at least two hosts in your current inventory, it is better to write something like +`hosts: "{{ all[1] | default([]) }}"`, which is safe and does not produce runtime +errors. Use safe fallbacks to make your code more resilient. diff --git a/src/ansiblelint/_internal/load-failure.md b/src/ansiblelint/_internal/load-failure.md new file mode 100644 index 0000000..78daa0d --- /dev/null +++ b/src/ansiblelint/_internal/load-failure.md @@ -0,0 +1,12 @@ +# load-failure + +Linter failed to process a file, possibly an invalid file. Possible reasons: + +* contains unsupported encoding (only UTF-8 is supported) +* not an Ansible file +* it contains some unsupported custom YAML objects (`!!` prefix) +* it was not able to decrypt an inline `!vault` block. + +This violation **is not** skippable, so it cannot be added to the `warn_list` +or the `skip_list`. If a vault decryption issue cannot be avoided, the +offending file can be added to `exclude_paths` configuration. diff --git a/src/ansiblelint/_internal/parser-error.md b/src/ansiblelint/_internal/parser-error.md new file mode 100644 index 0000000..f6c7649 --- /dev/null +++ b/src/ansiblelint/_internal/parser-error.md @@ -0,0 +1,5 @@ +# parser-error + +**AnsibleParserError.** + +Ansible parser fails; this usually indicates an invalid file. diff --git a/src/ansiblelint/_internal/rules.py b/src/ansiblelint/_internal/rules.py new file mode 100644 index 0000000..4d0bf49 --- /dev/null +++ b/src/ansiblelint/_internal/rules.py @@ -0,0 +1,194 @@ +"""Internally used rule classes.""" +from __future__ import annotations + +import inspect +import logging +from pathlib import Path +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import RULE_DOC_URL + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + from ansiblelint.rules import RulesCollection + +_logger = logging.getLogger(__name__) +LOAD_FAILURE_MD = """\ +# load-failure + +Linter failed to process a file, possibly an invalid file. Possible reasons: + +* contains unsupported encoding (only UTF-8 is supported) +* not an Ansible file +* it contains some unsupported custom YAML objects (`!!` prefix) +* it was not able to decrypt an inline `!vault` block. + +This violation **is not** skippable, so it cannot be added to the `warn_list` +or the `skip_list`. If a vault decryption issue cannot be avoided, the +offending file can be added to `exclude_paths` configuration.
+""" + + +# Derived rules are likely to want to access class members, so: +# pylint: disable=unused-argument +class BaseRule: + """Root class used by Rules.""" + + id: str = "" + tags: list[str] = [] + description: str = "" + version_added: str = "" + severity: str = "" + link: str = "" + has_dynamic_tags: bool = False + needs_raw_task: bool = False + # We use _order to sort rules and to ensure that some run before others, + # _order 0 for internal rules + # _order 1 for rules that check that data can be loaded + # _order 5 implicit for normal rules + _order: int = 5 + _help: str | None = None + # Added when a rule is registered into a collection, gives access to options + _collection: RulesCollection | None = None + + @property + def help(self) -> str: + """Return a help markdown string for the rule.""" + if self._help is None: + self._help = "" + md_file = ( + Path(inspect.getfile(self.__class__)).parent + / f"{self.id.replace('-', '_')}.md" + ) + if md_file.exists(): + self._help = md_file.read_text(encoding="utf-8") + return self._help + + @property + def url(self) -> str: + """Return rule documentation url.""" + url = self.link + if not url: # pragma: no cover + url = RULE_DOC_URL + if self.id: + url += self.id + "/" + return url + + @property + def shortdesc(self) -> str: + """Return the short description of the rule, basically the docstring.""" + return self.__doc__ or "" + + def getmatches(self, file: Lintable) -> list[MatchError]: + """Return all matches while ignoring exceptions.""" + matches = [] + if not file.path.is_dir(): + for method in [self.matchlines, self.matchtasks, self.matchyaml]: + try: + matches.extend(method(file)) + except Exception as exc: # pylint: disable=broad-except + _logger.warning( + "Ignored exception from %s.%s while processing %s: %s", + self.__class__.__name__, + method, + str(file), + exc, + ) + else: + matches.extend(self.matchdir(file)) + return matches + + def matchlines(self, file: Lintable) -> list[MatchError]: + """Return matches found for a specific line.""" + return [] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str | MatchError | list[MatchError]: + """Confirm if current rule is matching a specific task. + + If ``needs_raw_task`` (a class level attribute) is ``True``, then + the original task (before normalization) will be made available under + ``task["__raw_task__"]``. 
+ """ + return False + + def matchtasks(self, file: Lintable) -> list[MatchError]: + """Return matches for a tasks file.""" + return [] + + def matchyaml(self, file: Lintable) -> list[MatchError]: + """Return matches found for a specific YAML text.""" + return [] + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + """Return matches found for a specific playbook.""" + return [] + + def matchdir(self, lintable: Lintable) -> list[MatchError]: + """Return matches for lintable folders.""" + return [] + + def verbose(self) -> str: + """Return a verbose representation of the rule.""" + return self.id + ": " + self.shortdesc + "\n " + self.description + + def match(self, line: str) -> bool | str: + """Confirm if current rule matches the given string.""" + return False + + def __lt__(self, other: BaseRule) -> bool: + """Enable us to sort rules by their id.""" + return (self._order, self.id) < (other._order, other.id) + + def __repr__(self) -> str: + """Return a AnsibleLintRule instance representation.""" + return self.id + ": " + self.shortdesc + + +# pylint: enable=unused-argument + + +class RuntimeErrorRule(BaseRule): + """Unexpected internal error.""" + + id = "internal-error" + severity = "VERY_HIGH" + tags = ["core"] + version_added = "v5.0.0" + _order = 0 + + +class AnsibleParserErrorRule(BaseRule): + """AnsibleParserError.""" + + id = "parser-error" + description = "Ansible parser fails; this usually indicates an invalid file." + severity = "VERY_HIGH" + tags = ["core"] + version_added = "v5.0.0" + _order = 0 + + +class LoadingFailureRule(BaseRule): + """Failed to load or parse file.""" + + id = "load-failure" + description = "Linter failed to process a file, possible invalid file." + severity = "VERY_HIGH" + tags = ["core", "unskippable"] + version_added = "v4.3.0" + help = LOAD_FAILURE_MD + _order = 0 + + +class WarningRule(BaseRule): + """Other warnings detected during run.""" + + id = "warning" + severity = "LOW" + # should remain experimental as that would keep it warning only + tags = ["core", "experimental"] + version_added = "v6.8.0" + _order = 0 diff --git a/src/ansiblelint/_internal/warning.md b/src/ansiblelint/_internal/warning.md new file mode 100644 index 0000000..97d2577 --- /dev/null +++ b/src/ansiblelint/_internal/warning.md @@ -0,0 +1,9 @@ +# warning + +`warning` is a special type of internal rule that is used to report generic +runtime warnings found during execution. As stated by its name, they are not +counted as errors, so they do not influence the final outcome. + +- `warning[raw-non-string]` indicates that you are using + `[raw](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/raw_module.html#ansible-collections-ansible-builtin-raw-module)` + module with non-string arguments, which is not supported by Ansible. 
diff --git a/src/ansiblelint/_mockings.py b/src/ansiblelint/_mockings.py new file mode 100644 index 0000000..6b57d6c --- /dev/null +++ b/src/ansiblelint/_mockings.py @@ -0,0 +1,86 @@ +"""Utilities for mocking ansible modules and roles.""" +from __future__ import annotations + +import logging +import os +import re +import sys + +from ansiblelint.config import options +from ansiblelint.constants import ANSIBLE_MOCKED_MODULE, INVALID_CONFIG_RC + +_logger = logging.getLogger(__name__) + + +def _make_module_stub(module_name: str) -> None: + # a.b.c is treated a collection + if re.match(r"^(\w+|\w+\.\w+\.[\.\w]+)$", module_name): + parts = module_name.split(".") + if len(parts) < 3: + path = f"{options.cache_dir}/modules" + module_file = f"{options.cache_dir}/modules/{module_name}.py" + namespace = None + collection = None + else: + namespace = parts[0] + collection = parts[1] + path = f"{ options.cache_dir }/collections/ansible_collections/{ namespace }/{ collection }/plugins/modules/{ '/'.join(parts[2:-1]) }" + module_file = f"{path}/{parts[-1]}.py" + os.makedirs(path, exist_ok=True) + _write_module_stub( + filename=module_file, + name=module_file, + namespace=namespace, + collection=collection, + ) + else: + _logger.error("Config error: %s is not a valid module name.", module_name) + sys.exit(INVALID_CONFIG_RC) + + +def _write_module_stub( + filename: str, + name: str, + namespace: str | None = None, + collection: str | None = None, +) -> None: + """Write module stub to disk.""" + body = ANSIBLE_MOCKED_MODULE.format( + name=name, collection=collection, namespace=namespace + ) + with open(filename, "w", encoding="utf-8") as f: + f.write(body) + + +# pylint: disable=too-many-branches +def _perform_mockings() -> None: # noqa: C901 + """Mock modules and roles.""" + for role_name in options.mock_roles: + if re.match(r"\w+\.\w+\.\w+$", role_name): + namespace, collection, role_dir = role_name.split(".") + path = f"{options.cache_dir}/collections/ansible_collections/{ namespace }/{ collection }/roles/{ role_dir }/" + else: + path = f"{options.cache_dir}/roles/{role_name}" + # Avoid error from makedirs if destination is a broken symlink + if os.path.islink(path) and not os.path.exists(path): # pragma: no cover + _logger.warning("Removed broken symlink from %s", path) + os.unlink(path) + os.makedirs(path, exist_ok=True) + + if options.mock_modules: + for module_name in options.mock_modules: + _make_module_stub(module_name) + + +def _perform_mockings_cleanup() -> None: # noqa: C901 + """Clean up mocked modules and roles.""" + for role_name in options.mock_roles: + if re.match(r"\w+\.\w+\.\w+$", role_name): + namespace, collection, role_dir = role_name.split(".") + path = f"{options.cache_dir}/collections/ansible_collections/{ namespace }/{ collection }/roles/{ role_dir }/" + else: + path = f"{options.cache_dir}/roles/{role_name}" + try: + os.rmdir(path) + except OSError: + pass diff --git a/src/ansiblelint/_vendor/__init__.py b/src/ansiblelint/_vendor/__init__.py new file mode 100644 index 0000000..9cd4ee3 --- /dev/null +++ b/src/ansiblelint/_vendor/__init__.py @@ -0,0 +1,48 @@ +import os +import pkgutil +import sys +import warnings + +# This package exists to host vendored top-level Python packages for downstream packaging. Any Python packages +# installed beneath this one will be masked from the Ansible loader, and available from the front of sys.path. 
+# It is expected that the vendored packages will be loaded very early, so a warning will be fired on import of +# the top-level ansible package if any packages beneath this are already loaded at that point. +# +# Python packages may be installed here during downstream packaging using something like: +# pip install --upgrade -t (path to this dir) cryptography pyyaml packaging jinja2 + +# mask vendored content below this package from being accessed as a subpackage +__path__ = [] + + +def _ensure_vendored_path_entry() -> None: + """ + Ensure that any downstream-bundled content beneath this package is available at the top of sys.path + """ + # patch our vendored dir onto sys.path + vendored_path_entry = os.path.dirname(__file__) + vendored_module_names = { + m[1] for m in pkgutil.iter_modules([vendored_path_entry], "") + } # m[1] == m.name + + if vendored_module_names: + # patch us early to load vendored deps transparently + if vendored_path_entry in sys.path: + # handle reload case by removing the existing entry, wherever it might be + sys.path.remove(vendored_path_entry) + sys.path.insert(0, vendored_path_entry) + + already_loaded_vendored_modules = set(sys.modules.keys()).intersection( + vendored_module_names + ) + + if already_loaded_vendored_modules: + warnings.warn( + "One or more Python packages bundled by this ansible-lint distribution were already " + "loaded ({}). This may result in undefined behavior.".format( + ", ".join(sorted(already_loaded_vendored_modules)) + ) + ) + + +_ensure_vendored_path_entry() diff --git a/src/ansiblelint/_vendor/ansible_compat b/src/ansiblelint/_vendor/ansible_compat new file mode 120000 index 0000000..cd5012a --- /dev/null +++ b/src/ansiblelint/_vendor/ansible_compat @@ -0,0 +1 @@ +../../../.projects/ansible-compat/src/ansible_compat
\ No newline at end of file diff --git a/src/ansiblelint/app.py b/src/ansiblelint/app.py new file mode 100644 index 0000000..7d14504 --- /dev/null +++ b/src/ansiblelint/app.py @@ -0,0 +1,360 @@ +"""Application.""" +from __future__ import annotations + +import itertools +import logging +import os +from functools import lru_cache +from typing import TYPE_CHECKING, Any + +from ansible_compat.runtime import Runtime +from rich.markup import escape +from rich.table import Table + +from ansiblelint import formatters +from ansiblelint._mockings import _perform_mockings +from ansiblelint.color import console, console_stderr, render_yaml +from ansiblelint.config import PROFILES, get_version_warning +from ansiblelint.config import options as default_options +from ansiblelint.constants import RULE_DOC_URL, SUCCESS_RC, VIOLATIONS_FOUND_RC +from ansiblelint.errors import MatchError +from ansiblelint.loaders import IGNORE_TXT +from ansiblelint.stats import SummarizedResults, TagStats + +if TYPE_CHECKING: + from argparse import Namespace + from typing import Dict, Set # pylint: disable=ungrouped-imports + + from ansiblelint._internal.rules import BaseRule + from ansiblelint.file_utils import Lintable + from ansiblelint.runner import LintResult + + +_logger = logging.getLogger(__package__) + + +class App: + """App class represents an execution of the linter.""" + + def __init__(self, options: Namespace): + """Construct app run based on already loaded configuration.""" + options.skip_list = _sanitize_list_options(options.skip_list) + options.warn_list = _sanitize_list_options(options.warn_list) + + self.options = options + + formatter_factory = choose_formatter_factory(options) + self.formatter = formatter_factory(options.cwd, options.display_relative_path) + + # Without require_module, our _set_collections_basedir may fail + self.runtime = Runtime(isolated=True, require_module=True) + + def render_matches(self, matches: list[MatchError]) -> None: + """Display given matches (if they are not fixed).""" + matches = [match for match in matches if not match.fixed] + + if isinstance( + self.formatter, + (formatters.CodeclimateJSONFormatter, formatters.SarifFormatter), + ): + # If formatter CodeclimateJSONFormatter or SarifFormatter is chosen, + # then print only the matches in JSON + console.print( + self.formatter.format_result(matches), markup=False, highlight=False + ) + return + + ignored_matches = [match for match in matches if match.ignored] + fatal_matches = [match for match in matches if not match.ignored] + # Displayed ignored matches first + if ignored_matches: + _logger.warning( + "Listing %s violation(s) marked as ignored, likely already known", + len(ignored_matches), + ) + for match in ignored_matches: + if match.ignored: + # highlight must be off or apostrophes may produce unexpected results + console.print(self.formatter.format(match), highlight=False) + if fatal_matches: + _logger.warning( + "Listing %s violation(s) that are fatal", len(fatal_matches) + ) + for match in fatal_matches: + if not match.ignored: + console.print(self.formatter.format(match), highlight=False) + + # If run under GitHub Actions we also want to emit output recognized by it. + if os.getenv("GITHUB_ACTIONS") == "true" and os.getenv("GITHUB_WORKFLOW"): + formatter = formatters.AnnotationsFormatter(self.options.cwd, True) + for match in itertools.chain(fatal_matches, ignored_matches): + console.print(formatter.format(match), markup=False, highlight=False) + + # If sarif_file is set, we also dump the results to a sarif file. 
+ if self.options.sarif_file: + sarif = formatters.SarifFormatter(self.options.cwd, True) + json = sarif.format_result(matches) + with open(self.options.sarif_file, "w", encoding="utf-8") as sarif_file: + sarif_file.write(json) + + def count_results(self, matches: list[MatchError]) -> SummarizedResults: + """Count failures and warnings in matches.""" + result = SummarizedResults() + + for match in matches: + # any ignores match counts as a warning + if match.ignored: + result.warnings += 1 + continue + # tag can include a sub-rule id: `yaml[document-start]` + # rule.id is the generic rule id: `yaml` + # *rule.tags is the list of the rule's tags (categories): `style` + if match.tag not in result.tag_stats: + result.tag_stats[match.tag] = TagStats( + tag=match.tag, count=1, associated_tags=match.rule.tags + ) + else: + result.tag_stats[match.tag].count += 1 + + if {match.tag, match.rule.id, *match.rule.tags}.isdisjoint( + self.options.warn_list + ): + # not in warn_list + if match.fixed: + result.fixed_failures += 1 + else: + result.failures += 1 + else: + result.tag_stats[match.tag].warning = True + if match.fixed: + result.fixed_warnings += 1 + else: + result.warnings += 1 + return result + + @staticmethod + def count_lintables(files: set[Lintable]) -> tuple[int, int]: + """Count total and modified files.""" + files_count = len(files) + changed_files_count = len([file for file in files if file.updated]) + return files_count, changed_files_count + + @staticmethod + def _get_matched_skippable_rules( + matches: list[MatchError], + ) -> dict[str, BaseRule]: + """Extract the list of matched rules, if skippable, from the list of matches.""" + matches_unignored = [match for match in matches if not match.ignored] + # match.tag is more specialized than match.rule.id + matched_rules = { + match.tag or match.rule.id: match.rule for match in matches_unignored + } + # remove unskippable rules from the list + for rule_id in list(matched_rules.keys()): + if "unskippable" in matched_rules[rule_id].tags: + matched_rules.pop(rule_id) + return matched_rules + + def report_outcome(self, result: LintResult, mark_as_success: bool = False) -> int: + """Display information about how to skip found rules. + + Returns exit code, 2 if errors were found, 0 when only warnings were found. + """ + msg = "" + + summary = self.count_results(result.matches) + files_count, changed_files_count = self.count_lintables(result.files) + + matched_rules = self._get_matched_skippable_rules(result.matches) + + if matched_rules and self.options.generate_ignore: + console_stderr.print(f"Writing ignore file to {IGNORE_TXT}") + lines = set() + for rule in result.matches: + lines.add(f"{rule.filename} {rule.tag}\n") + with open(IGNORE_TXT, "w", encoding="utf-8") as ignore_file: + ignore_file.write( + "# This file contains ignores rule violations for ansible-lint\n" + ) + ignore_file.writelines(sorted(list(lines))) + elif matched_rules and not self.options.quiet: + console_stderr.print( + "Read [link=https://ansible-lint.readthedocs.io/configuring/#ignoring-rules-for-entire-files]documentation[/link] for instructions on how to ignore specific rule violations." + ) + + # Do not deprecate the old tags just yet. Why? Because it is not currently feasible + # to migrate old tags to new tags. There are a lot of things out there that still + # use ansible-lint 4 (for example, Ansible Galaxy and Automation Hub imports). If we + # replace the old tags, those tools will report warnings. 
If we do not replace them, + # ansible-lint 5 will report warnings. + # + # We can do the deprecation once the ecosystem caught up at least a bit. + # for k, v in used_old_tags.items(): + # _logger.warning( + # "Replaced deprecated tag '%s' with '%s' but it will become an " + # "error in the future.", + # k, + # v, + # ) + + if self.options.write_list and "yaml" in self.options.skip_list: + _logger.warning( + "You specified '--write', but no files can be modified " + "because 'yaml' is in 'skip_list'." + ) + + if mark_as_success and summary.failures and not self.options.progressive: + mark_as_success = False + + if not self.options.quiet: + console_stderr.print(render_yaml(msg)) + self.report_summary( + summary, changed_files_count, files_count, is_success=mark_as_success + ) + + return SUCCESS_RC if mark_as_success else VIOLATIONS_FOUND_RC + + def report_summary( # pylint: disable=too-many-branches,too-many-locals + self, + summary: SummarizedResults, + changed_files_count: int, + files_count: int, + is_success: bool, + ) -> None: + """Report match and file counts.""" + # sort the stats by profiles + idx = 0 + rule_order = {} + + for profile, profile_config in PROFILES.items(): + for rule in profile_config["rules"]: + # print(profile, rule) + rule_order[rule] = (idx, profile) + idx += 1 + _logger.debug("Determined rule-profile order: %s", rule_order) + failed_profiles = set() + for tag, tag_stats in summary.tag_stats.items(): + if tag in rule_order: + tag_stats.order, tag_stats.profile = rule_order.get(tag, (idx, "")) + elif "[" in tag: + tag_stats.order, tag_stats.profile = rule_order.get( + tag.split("[")[0], (idx, "") + ) + if tag_stats.profile: + failed_profiles.add(tag_stats.profile) + summary.sort() + + if changed_files_count: + console_stderr.print(f"Modified {changed_files_count} files.") + + # determine which profile passed + summary.passed_profile = "" + passed_profile_count = 0 + for profile in PROFILES.keys(): + if profile in failed_profiles: + break + if profile != summary.passed_profile: + summary.passed_profile = profile + passed_profile_count += 1 + + stars = "" + if summary.tag_stats: + table = Table( + title="Rule Violation Summary", + collapse_padding=True, + box=None, + show_lines=False, + ) + table.add_column("count", justify="right") + table.add_column("tag") + table.add_column("profile") + table.add_column("rule associated tags") + for tag, stats in summary.tag_stats.items(): + table.add_row( + str(stats.count), + f"[link={RULE_DOC_URL}{ tag.split('[')[0] }]{escape(tag)}[/link]", + stats.profile, + f"{', '.join(stats.associated_tags)}{' (warning)' if stats.warning else ''}", + style="yellow" if stats.warning else "red", + ) + # rate stars for the top 5 profiles (min would not get + rating = 5 - (len(PROFILES.keys()) - passed_profile_count) + if 0 < rating < 6: + stars = f", {rating}/5 star rating" + + console_stderr.print(table) + console_stderr.print() + + if is_success: + msg = "[green]Passed[/] with " + else: + msg = "[red][bold]Failed[/][/] after " + + if summary.passed_profile: + msg += f"[bold]{summary.passed_profile}[/] profile" + if stars: + msg += stars + + msg += f": {summary.failures} failure(s), {summary.warnings} warning(s)" + if summary.fixed: + msg += f", and fixed {summary.fixed} issue(s)" + msg += f" on {files_count} files." + + # on offline mode and when run under pre-commit we do not want to + # check for updates. 
+ if not self.options.offline and os.environ.get("PRE_COMMIT", "0") != "1": + version_warning = get_version_warning() + if version_warning: + msg += f"\n{version_warning}" + + console_stderr.print(msg) + + +def choose_formatter_factory( + options_list: Namespace, +) -> type[formatters.BaseFormatter[Any]]: + """Select an output formatter based on the incoming command line arguments.""" + r: type[formatters.BaseFormatter[Any]] = formatters.Formatter + if options_list.format == "quiet": + r = formatters.QuietFormatter + elif options_list.format in ("json", "codeclimate"): + r = formatters.CodeclimateJSONFormatter + elif options_list.format == "sarif": + r = formatters.SarifFormatter + elif options_list.parseable or options_list.format == "pep8": + r = formatters.ParseableFormatter + return r + + +def _sanitize_list_options(tag_list: list[str]) -> list[str]: + """Normalize list options.""" + # expand comma separated entries + tags = set() + for tag in tag_list: + tags.update(str(tag).split(",")) + # remove duplicates, and return as sorted list + return sorted(set(tags)) + + +@lru_cache +def get_app() -> App: + """Return the application instance, caching the return value.""" + offline = default_options.offline + app = App(options=default_options) + # Make linter use the cache dir from compat + default_options.cache_dir = app.runtime.cache_dir + + role_name_check = 0 + if "role-name" in app.options.warn_list: + role_name_check = 1 + elif "role-name" in app.options.skip_list: + role_name_check = 2 + + # mocking must happen before prepare_environment or galaxy install might + # fail. + _perform_mockings() + app.runtime.prepare_environment( + install_local=(not offline), offline=offline, role_name_check=role_name_check + ) + + return app diff --git a/src/ansiblelint/cli.py b/src/ansiblelint/cli.py new file mode 100644 index 0000000..9540d23 --- /dev/null +++ b/src/ansiblelint/cli.py @@ -0,0 +1,593 @@ +"""CLI parser setup and helpers.""" +from __future__ import annotations + +import argparse +import logging +import os +import sys +from argparse import Namespace +from pathlib import Path +from typing import Any, Callable, Sequence + +from ansiblelint.config import DEFAULT_KINDS, DEFAULT_WARN_LIST, PROFILES +from ansiblelint.constants import ( + CUSTOM_RULESDIR_ENVVAR, + DEFAULT_RULESDIR, + INVALID_CONFIG_RC, +) +from ansiblelint.file_utils import ( + Lintable, + abspath, + expand_path_vars, + guess_project_dir, + normpath, +) +from ansiblelint.schemas.main import validate_file_schema +from ansiblelint.yaml_utils import clean_json + +_logger = logging.getLogger(__name__) +_PATH_VARS = [ + "exclude_paths", + "rulesdir", +] + + +def expand_to_normalized_paths( + config: dict[str, Any], base_dir: str | None = None +) -> None: + """Mutate given config normalizing any path values in it.""" + # config can be None (-c /dev/null) + if not config: + return + base_dir = base_dir or os.getcwd() + for paths_var in _PATH_VARS: + if paths_var not in config: + continue # Cause we don't want to add a variable not present + + normalized_paths = [] + for path in config.pop(paths_var): + normalized_path = abspath(expand_path_vars(path), base_dir=base_dir) + + normalized_paths.append(normalized_path) + + config[paths_var] = normalized_paths + + +def load_config(config_file: str) -> dict[Any, Any]: + """Load configuration from disk.""" + config_path = None + if config_file: + config_path = os.path.abspath(config_file) + if not os.path.exists(config_path): + _logger.error("Config file not found '%s'", config_path) + 
sys.exit(INVALID_CONFIG_RC) + config_path = config_path or get_config_path() + if not config_path or not os.path.exists(config_path): + # a missing default config file should not trigger an error + return {} + + config_lintable = Lintable( + config_path, kind="ansible-lint-config", base_kind="text/yaml" + ) + + for error in validate_file_schema(config_lintable): + _logger.error("Invalid configuration file %s. %s", config_path, error) + sys.exit(INVALID_CONFIG_RC) + + config = clean_json(config_lintable.data) + if not isinstance(config, dict): + raise RuntimeError("Schema failed to properly validate the config file.") + config["config_file"] = config_path + config_dir = os.path.dirname(config_path) + expand_to_normalized_paths(config, config_dir) + + return config + + +def get_config_path(config_file: str | None = None) -> str | None: + """Return local config file.""" + if config_file: + project_filenames = [config_file] + else: + project_filenames = [".ansible-lint", ".config/ansible-lint.yml"] + parent = tail = os.getcwd() + while tail: + for project_filename in project_filenames: + filename = os.path.abspath(os.path.join(parent, project_filename)) + if os.path.exists(filename): + return filename + if os.path.exists(os.path.abspath(os.path.join(parent, ".git"))): + # Avoid looking outside .git folders as we do not want end-up + # picking config files from upper level projects if current + # project has no config. + return None + (parent, tail) = os.path.split(parent) + return None + + +class AbspathArgAction(argparse.Action): + """Argparse action to convert relative paths to absolute paths.""" + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: Namespace, + values: str | Sequence[Any] | None, + option_string: str | None = None, + ) -> None: + if isinstance(values, (str, Path)): + values = [values] + if values: + normalized_values = [ + Path(expand_path_vars(str(path))).resolve() for path in values + ] + previous_values = getattr(namespace, self.dest, []) + setattr(namespace, self.dest, previous_values + normalized_values) + + +class WriteArgAction(argparse.Action): + """Argparse action to handle the --write flag with optional args.""" + + _default = "__default__" + + # noinspection PyShadowingBuiltins + def __init__( # pylint: disable=too-many-arguments,redefined-builtin + self, + option_strings: list[str], + dest: str, + nargs: int | str | None = None, + const: Any = None, + default: Any = None, + type: Callable[[str], Any] | None = None, + choices: list[Any] | None = None, + required: bool = False, + help: str | None = None, + metavar: str | None = None, + ) -> None: + """Create the argparse action with WriteArg-specific defaults.""" + if nargs is not None: + raise ValueError("nargs for WriteArgAction must not be set.") + if const is not None: + raise ValueError("const for WriteArgAction must not be set.") + super().__init__( + option_strings=option_strings, + dest=dest, + nargs="?", # either 0 (--write) or 1 (--write=a,b,c) argument + const=self._default, # --write (no option) implicitly stores this + default=default, + type=type, + choices=choices, + required=required, + help=help, + metavar=metavar, + ) + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: Namespace, + values: str | Sequence[Any] | None, + option_string: str | None = None, + ) -> None: + lintables = getattr(namespace, "lintables", None) + if not lintables and isinstance(values, str): + # args are processed in order. 
+ # If --write is after lintables, then that is not ambiguous. + # But if --write comes first, then it might actually be a lintable. + maybe_lintable = Path(values) + if maybe_lintable.exists(): + setattr(namespace, "lintables", [values]) + values = [] + if isinstance(values, str): + values = values.split(",") + default = [self.const] if isinstance(self.const, str) else self.const + previous_values = getattr(namespace, self.dest, default) or default + if not values: + values = previous_values + elif previous_values != default: + values = previous_values + values + setattr(namespace, self.dest, values) + + @classmethod + def merge_write_list_config( + cls, from_file: list[str], from_cli: list[str] + ) -> list[str]: + """Combine the write_list from file config with --write CLI arg. + + Handles the implicit "all" when "__default__" is present and file config is empty. + """ + if not from_file or "none" in from_cli: + # --write is the same as --write=all + return ["all" if value == cls._default else value for value in from_cli] + # --write means use the config from the config file + from_cli = [value for value in from_cli if value != cls._default] + return from_file + from_cli + + +def get_cli_parser() -> argparse.ArgumentParser: + """Initialize an argument parser.""" + parser = argparse.ArgumentParser() + + listing_group = parser.add_mutually_exclusive_group() + listing_group.add_argument( + "-P", + "--list-profiles", + dest="list_profiles", + default=False, + action="store_true", + help="List all profiles, no formatting options available.", + ) + listing_group.add_argument( + "-L", + "--list-rules", + dest="list_rules", + default=False, + action="store_true", + help="List all the rules. For listing rules only the following formats " + "for argument -f are supported: {brief, full, md} with 'brief' as default.", + ) + listing_group.add_argument( + "-T", + "--list-tags", + dest="list_tags", + action="store_true", + help="List all the tags and the rules they cover. Increase the verbosity level " + "with `-v` to include 'opt-in' tag and its rules.", + ) + parser.add_argument( + "-f", + "--format", + dest="format", + default=None, + choices=[ + "brief", + # "plain", + "full", + "md", + "json", + "codeclimate", + "quiet", + "pep8", + "sarif", + "docs", # internally used + ], + help="stdout formatting, json being an alias for codeclimate. (default: %(default)s)", + ) + parser.add_argument("--sarif-file", default=None, help="SARIF output file") + parser.add_argument( + "-q", + dest="quiet", + default=0, + action="count", + help="quieter, reduce verbosity, can be specified twice.", + ) + parser.add_argument( + "--profile", + dest="profile", + default=None, + action="store", + choices=PROFILES.keys(), + help="Specify which rules profile to be used.", + ) + parser.add_argument( + "-p", + "--parseable", + dest="parseable", + default=False, + action="store_true", + help="parseable output, same as '-f pep8'", + ) + parser.add_argument( + "--progressive", + dest="progressive", + default=False, + action="store_true", + help="Return success if number of violations compared with " + "previous git commit has not increased. 
This feature works " + "only in git repositories.", + ) + parser.add_argument( + "--project-dir", + dest="project_dir", + default=".", + help="Location of project/repository, autodetected based on location " + "of configuration file.", + ) + parser.add_argument( + "-r", + "--rules-dir", + action=AbspathArgAction, + dest="rulesdir", + default=[], + type=Path, + help="Specify custom rule directories. Add -R " + f"to keep using embedded rules from {DEFAULT_RULESDIR}", + ) + parser.add_argument( + "-R", + action="store_true", + default=False, + dest="use_default_rules", + help="Keep default rules when using -r", + ) + parser.add_argument( + "-s", + "--strict", + action="store_true", + default=False, + dest="strict", + help="Return non-zero exit code on warnings as well as errors", + ) + parser.add_argument( + "--write", + dest="write_list", + # this is a tri-state argument that takes an optional comma separated list: + # not provided, --write, --write=a,b,c + action=WriteArgAction, + help="Allow ansible-lint to reformat YAML files and run rule transforms " + "(Reformatting YAML files standardizes spacing, quotes, etc. " + "A rule transform can fix or simplify fixing issues identified by that rule). " + "You can limit the effective rule transforms (the 'write_list') by passing a " + "keywords 'all' or 'none' or a comma separated list of rule ids or rule tags. " + "YAML reformatting happens whenever '--write' or '--write=' is used. " + "'--write' and '--write=all' are equivalent: they allow all transforms to run. " + "The effective list of transforms comes from 'write_list' in the config file, " + "followed whatever '--write' args are provided on the commandline. " + "'--write=none' resets the list of transforms to allow reformatting YAML " + "without running any of the transforms (ie '--write=none,rule-id' will " + "ignore write_list in the config file and only run the rule-id transform).", + ) + parser.add_argument( + "--show-relpath", + dest="display_relative_path", + action="store_false", + default=True, + help="Display path relative to CWD", + ) + parser.add_argument( + "-t", + "--tags", + dest="tags", + action="append", + default=[], + help="only check rules whose id/tags match these values", + ) + parser.add_argument( + "-v", + dest="verbosity", + action="count", + help="Increase verbosity level (-vv for more)", + default=0, + ) + parser.add_argument( + "-x", + "--skip-list", + dest="skip_list", + default=[], + action="append", + help="only check rules whose id/tags do not match these values. \ + e.g: --skip-list=name,run-once", + ) + parser.add_argument( + "--generate-ignore", + dest="generate_ignore", + action="store_true", + default=False, + help="Generate a text file '.ansible-lint-ignore' that ignores all found violations. Each line contains filename and rule id separated by a space.", + ) + parser.add_argument( + "-w", + "--warn-list", + dest="warn_list", + default=[], + action="append", + help="only warn about these rules, unless overridden in " + f"config file. Current version default value is: {', '.join(DEFAULT_WARN_LIST)}", + ) + parser.add_argument( + "--enable-list", + dest="enable_list", + default=[], + action="append", + help="activate optional rules by their tag name", + ) + # Do not use store_true/store_false because they create opposite defaults. 
+ parser.add_argument( + "--nocolor", + dest="colored", + action="store_const", + const=False, + help="disable colored output, same as NO_COLOR=1", + ) + parser.add_argument( + "--force-color", + dest="colored", + action="store_const", + const=True, + help="Force colored output, same as FORCE_COLOR=1", + ) + parser.add_argument( + "--exclude", + dest="exclude_paths", + action=AbspathArgAction, + type=Path, + default=[], + help="path to directories or files to skip. " "This option is repeatable.", + ) + parser.add_argument( + "-c", + "--config-file", + dest="config_file", + help="Specify configuration file to use. By default it will look for '.ansible-lint' or '.config/ansible-lint.yml'", + ) + parser.add_argument( + "--offline", + dest="offline", + action="store_const", + const=True, + help="Disable installation of requirements.yml and schema refreshing", + ) + parser.add_argument( + "--version", + action="store_true", + ) + parser.add_argument( + dest="lintables", + nargs="*", + action="extend", + help="One or more files or paths. When missing it will enable auto-detection mode.", + ) + + return parser + + +def merge_config(file_config: dict[Any, Any], cli_config: Namespace) -> Namespace: + """Combine the file config with the CLI args.""" + bools = ( + "display_relative_path", + "parseable", + "quiet", + "strict", + "use_default_rules", + "progressive", + "offline", + ) + # maps lists to their default config values + lists_map = { + "exclude_paths": [".cache", ".git", ".hg", ".svn", ".tox"], + "rulesdir": [], + "skip_list": [], + "tags": [], + "warn_list": DEFAULT_WARN_LIST, + "mock_modules": [], + "mock_roles": [], + "enable_list": [], + "only_builtins_allow_collections": [], + "only_builtins_allow_modules": [], + # do not include "write_list" here. See special logic below. 
+ } + + scalar_map = { + "loop_var_prefix": None, + "project_dir": ".", + "profile": None, + "sarif_file": None, + } + + if not file_config: + # use defaults if we don't have a config file and the commandline + # parameter is not set + for entry, default in lists_map.items(): + if not getattr(cli_config, entry, None): + setattr(cli_config, entry, default) + return cli_config + + for entry in bools: + file_value = file_config.pop(entry, False) + v = getattr(cli_config, entry) or file_value + setattr(cli_config, entry, v) + + for entry, default in scalar_map.items(): + file_value = file_config.pop(entry, default) + v = getattr(cli_config, entry, None) or file_value + setattr(cli_config, entry, v) + + # if either commandline parameter or config file option is set merge + # with the other, if neither is set use the default + for entry, default in lists_map.items(): + if getattr(cli_config, entry, None) or entry in file_config.keys(): + value = getattr(cli_config, entry, []) + value.extend(file_config.pop(entry, [])) + else: + value = default + setattr(cli_config, entry, value) + + # "write_list" config has special merge rules + entry = "write_list" + setattr( + cli_config, + entry, + WriteArgAction.merge_write_list_config( + from_file=file_config.pop(entry, []), + from_cli=getattr(cli_config, entry, []) or [], + ), + ) + + if "verbosity" in file_config: + cli_config.verbosity = cli_config.verbosity + file_config.pop("verbosity") + + # merge options that can be set only via a file config + for entry, value in file_config.items(): + setattr(cli_config, entry, value) + + # append default kinds to the custom list + kinds = file_config.get("kinds", []) + kinds.extend(DEFAULT_KINDS) + setattr(cli_config, "kinds", kinds) + + return cli_config + + +def get_config(arguments: list[str]) -> Namespace: + """Extract the config based on given args.""" + parser = get_cli_parser() + options = parser.parse_args(arguments) + + # docs is not document, being used for internal documentation building + if options.list_rules and options.format not in [ + None, + "brief", + "full", + "md", + "docs", + ]: + parser.error( + f"argument -f: invalid choice: '{options.format}'. " + f"In combination with argument -L only 'brief', " + f"'rich' or 'md' are supported with -f." + ) + + # save info about custom config file, as options.config_file may be modified by merge_config + has_custom_config = not options.config_file + + file_config = load_config(options.config_file) + + config = merge_config(file_config, options) + + options.rulesdirs = get_rules_dirs(options.rulesdir, options.use_default_rules) + + if has_custom_config and options.project_dir == ".": + project_dir = guess_project_dir(options.config_file) + options.project_dir = os.path.expanduser(normpath(project_dir)) + + if not options.project_dir or not os.path.exists(options.project_dir): + raise RuntimeError( + f"Failed to determine a valid project_dir: {options.project_dir}" + ) + + # Compute final verbosity level by subtracting -q counter. 
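+    # For example, "-vv -q" ends up as a final verbosity of 1 (2 - 1), assuming
+    # -q is counted the same way -v is.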
+    options.verbosity -= options.quiet
+    return config
+
+
+def print_help(file: Any = sys.stdout) -> None:
+    """Print help text to the given stream."""
+    get_cli_parser().print_help(file=file)
+
+
+def get_rules_dirs(rulesdir: list[str], use_default: bool = True) -> list[str]:
+    """Return a list of rules dirs."""
+    default_ruledirs = [DEFAULT_RULESDIR]
+    default_custom_rulesdir = os.environ.get(
+        CUSTOM_RULESDIR_ENVVAR, os.path.join(DEFAULT_RULESDIR, "custom")
+    )
+    custom_ruledirs = sorted(
+        str(x.resolve())
+        for x in Path(default_custom_rulesdir).iterdir()
+        if x.is_dir() and (x / "__init__.py").exists()
+    )
+
+    if use_default:
+        return rulesdir + custom_ruledirs + default_ruledirs
+
+    return rulesdir or custom_ruledirs + default_ruledirs
diff --git a/src/ansiblelint/color.py b/src/ansiblelint/color.py
new file mode 100644
index 0000000..373ef78
--- /dev/null
+++ b/src/ansiblelint/color.py
@@ -0,0 +1,108 @@
+"""Console coloring and terminal support."""
+from __future__ import annotations
+
+from typing import Any
+
+import rich
+import rich.markdown
+from rich.console import Console
+from rich.default_styles import DEFAULT_STYLES
+from rich.style import Style
+from rich.syntax import Syntax
+from rich.theme import Theme
+
+# WARNING: When making style changes, be sure you test the output of
+# `ansible-lint -L` on multiple terminals with dark/light themes, including:
+# - iTerm2 (macOS) - bold might not be rendered differently
+# - vscode integrated terminal - bold might not be rendered differently, links will not work
+#
+# When it comes to colors being used, try to match:
+# - Ansible official documentation theme, https://docs.ansible.com/ansible/latest/dev_guide/developing_api.html
+# - VSCode Ansible extension for syntax highlighting
+# - GitHub markdown theme
+#
+# Current values: (docs)
+# codeblock border: #404040
+# codeblock background: #edf0f2
+# codeblock comment: #6a737d (also italic)
+# teletype-text: #e74c3c (red)
+# teletype-text-border: 1px solid #e1e4e5 (background white)
+# text: #404040
+# codeblock other-text: #555555 (black-ish)
+# codeblock property: #22863a (green)
+# codeblock integer: #032f62 (blue)
+# codeblock command: #0086b3 (blue) - [shell]
+# == python ==
+# class: #445588 (dark blue and bold)
+# docstring: #dd1144 (red)
+# self: #999999 (light-gray)
+# method/function: #990000 (dark-red)
+# number: #009999 cyan
+# keywords (def,None,False,len,from,import): #007020 (green) bold
+# super|dict|print: #0086b3 light-blue
+# __name__: #bb60d5 (magenta)
+# string: #dd1144 (light-red)
+DEFAULT_STYLES.update(
+    {
+        # "code": Style(color="bright_black", bgcolor="red"),
+        "markdown.code": Style(color="bright_black"),
+        "markdown.code_block": Style(dim=True, color="cyan"),
+    }
+)
+
+
+_theme = Theme(
+    {
+        "info": "cyan",
+        "warning": "yellow",
+        "danger": "bold red",
+        "title": "yellow",
+        "error": "bright_red",
+        "filename": "blue",
+    }
+)
+console_options: dict[str, Any] = {"emoji": False, "theme": _theme, "soft_wrap": True}
+console_options_stderr = console_options.copy()
+console_options_stderr["stderr"] = True
+
+console = rich.get_console()
+console_stderr = Console(**console_options_stderr)
+
+
+def reconfigure(new_options: dict[str, Any]) -> None:
+    """Reconfigure console options."""
+    global console_options  # pylint: disable=global-statement,invalid-name
+    global console_stderr  # pylint: disable=global-statement,invalid-name,global-variable-not-assigned
+
+    console_options = new_options
+    rich.reconfigure(**new_options)
+    # see
https://github.com/willmcgugan/rich/discussions/484#discussioncomment-200182 + new_console_options_stderr = console_options.copy() + new_console_options_stderr["stderr"] = True + tmp_console = Console(**new_console_options_stderr) + console_stderr.__dict__ = tmp_console.__dict__ + + +def render_yaml(text: str) -> Syntax: + """Colorize YAMl for nice display.""" + return Syntax(text, "yaml", theme="ansi_dark") + + +# pylint: disable=redefined-outer-name,unused-argument +def _rich_codeblock_custom_rich_console( + self: rich.markdown.CodeBlock, + console: Console, + options: rich.console.ConsoleOptions, +) -> rich.console.RenderResult: # pragma: no cover + code = str(self.text).rstrip() + syntax = Syntax( + code, + self.lexer_name, + theme=self.theme, + word_wrap=True, + background_color="default", + ) + yield syntax + + +rich.markdown.CodeBlock.__rich_console__ = _rich_codeblock_custom_rich_console # type: ignore diff --git a/src/ansiblelint/config.py b/src/ansiblelint/config.py new file mode 100644 index 0000000..c9262c9 --- /dev/null +++ b/src/ansiblelint/config.py @@ -0,0 +1,266 @@ +"""Store configuration options as a singleton.""" +from __future__ import annotations + +import json +import logging +import os +import sys +import time +import urllib.request +import warnings +from argparse import Namespace +from functools import lru_cache +from pathlib import Path +from typing import Any +from urllib.error import HTTPError, URLError + +from packaging.version import Version + +from ansiblelint import __version__ +from ansiblelint.loaders import yaml_from_file + +_logger = logging.getLogger(__name__) + + +CACHE_DIR = ( + os.path.expanduser(os.environ.get("XDG_CACHE_HOME", "~/.cache")) + "/ansible-lint" +) + +DEFAULT_WARN_LIST = [ + "experimental", + "jinja[spacing]", # warning until we resolve all reported false-positives +] + +DEFAULT_KINDS = [ + # Do not sort this list, order matters. 
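+    # Note: kind detection returns the first matching pattern, which is why the
+    # generic "**/*.{yaml,yml}" entries must stay at the end of this list.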
+ {"jinja2": "**/*.j2"}, # jinja2 templates are not always parsable as something else + {"jinja2": "**/*.j2.*"}, + {"yaml": ".github/**/*.{yaml,yml}"}, # github workflows + {"text": "**/templates/**/*.*"}, # templates are likely not validable + {"execution-environment": "**/execution-environment.yml"}, + {"ansible-lint-config": "**/.ansible-lint"}, + {"ansible-lint-config": "**/.config/ansible-lint.yml"}, + {"ansible-navigator-config": "**/ansible-navigator.{yaml,yml}"}, + {"inventory": "**/inventory/**.{yaml,yml}"}, + {"requirements": "**/meta/requirements.{yaml,yml}"}, # v1 only + # https://docs.ansible.com/ansible/latest/dev_guide/collections_galaxy_meta.html + {"galaxy": "**/galaxy.yml"}, # Galaxy collection meta + {"reno": "**/releasenotes/*/*.{yaml,yml}"}, # reno release notes + {"tasks": "**/tasks/**/*.{yaml,yml}"}, + {"rulebook": "**/rulebooks/*.{yml,yaml"}, + {"playbook": "**/playbooks/*.{yml,yaml}"}, + {"playbook": "**/*playbook*.{yml,yaml}"}, + {"role": "**/roles/*/"}, + {"handlers": "**/handlers/*.{yaml,yml}"}, + {"vars": "**/{host_vars,group_vars,vars,defaults}/**/*.{yaml,yml}"}, + {"test-meta": "**/tests/integration/targets/*/meta/main.{yaml,yml}"}, + {"meta": "**/meta/main.{yaml,yml}"}, + {"meta-runtime": "**/meta/runtime.{yaml,yml}"}, + {"arg_specs": "**/meta/argument_specs.{yaml,yml}"}, # role argument specs + {"yaml": ".config/molecule/config.{yaml,yml}"}, # molecule global config + { + "requirements": "**/molecule/*/{collections,requirements}.{yaml,yml}" + }, # molecule old collection requirements (v1), ansible 2.8 only + {"yaml": "**/molecule/*/{base,molecule}.{yaml,yml}"}, # molecule config + {"requirements": "**/requirements.{yaml,yml}"}, # v2 and v1 + {"playbook": "**/molecule/*/*.{yaml,yml}"}, # molecule playbooks + {"yaml": "**/{.ansible-lint,.yamllint}"}, + {"changelog": "**/changelogs/changelog.yaml"}, + {"yaml": "**/*.{yaml,yml}"}, + {"yaml": "**/.*.{yaml,yml}"}, +] + +BASE_KINDS = [ + # These assignations are only for internal use and are only inspired by + # MIME/IANA model. Their purpose is to be able to process a file based on + # it type, including generic processing of text files using the prefix. 
+ { + "text/jinja2": "**/*.j2" + }, # jinja2 templates are not always parsable as something else + {"text/jinja2": "**/*.j2.*"}, + {"text": "**/templates/**/*.*"}, # templates are likely not validable + {"text/json": "**/*.json"}, # standardized + {"text/markdown": "**/*.md"}, # https://tools.ietf.org/html/rfc7763 + {"text/rst": "**/*.rst"}, # https://en.wikipedia.org/wiki/ReStructuredText + {"text/ini": "**/*.ini"}, + # YAML has no official IANA assignation + {"text/yaml": "**/{.ansible-lint,.yamllint}"}, + {"text/yaml": "**/*.{yaml,yml}"}, + {"text/yaml": "**/.*.{yaml,yml}"}, +] + +PROFILES = yaml_from_file(Path(__file__).parent / "data" / "profiles.yml") + +LOOP_VAR_PREFIX = "^(__|{role}_)" + +options = Namespace( + cache_dir=None, + colored=True, + configured=False, + cwd=".", + display_relative_path=True, + exclude_paths=[], + format="brief", + lintables=[], + list_rules=False, + list_tags=False, + write_list=[], + parseable=False, + quiet=False, + rulesdirs=[], + skip_list=[], + tags=[], + verbosity=False, + warn_list=[], + kinds=DEFAULT_KINDS, + mock_filters=[], + mock_modules=[], + mock_roles=[], + loop_var_prefix=None, + only_builtins_allow_collections=[], + only_builtins_allow_modules=[], + var_naming_pattern=None, + offline=False, + project_dir=".", # default should be valid folder (do not use None here) + extra_vars=None, + enable_list=[], + skip_action_validation=True, + strict=False, + rules={}, # Placeholder to set and keep configurations for each rule. + profile=None, + task_name_prefix="{stem} | ", + sarif_file=None, +) + +# Used to store detected tag deprecations +used_old_tags: dict[str, str] = {} + +# Used to store collection list paths (with mock paths if needed) +collection_list: list[str] = [] + + +def get_rule_config(rule_id: str) -> dict[str, Any]: + """Get configurations for the rule ``rule_id``.""" + rule_config = options.rules.get(rule_id, {}) + if not isinstance(rule_config, dict): # pragma: no branch + raise RuntimeError(f"Invalid rule config for {rule_id}: {rule_config}") + return rule_config + + +@lru_cache +def ansible_collections_path() -> str: + """Return collection path variable for current version of Ansible.""" + # respect Ansible behavior, which is to load old name if present + for env_var in [ + "ANSIBLE_COLLECTIONS_PATHS", + "ANSIBLE_COLLECTIONS_PATH", + ]: # pragma: no cover + if env_var in os.environ: + return env_var + return "ANSIBLE_COLLECTIONS_PATH" + + +def in_venv() -> bool: + """Determine whether Python is running from a venv.""" + if hasattr(sys, "real_prefix") or os.environ.get("CONDA_EXE", None) is not None: + return True + + pfx = getattr(sys, "base_prefix", sys.prefix) + return pfx != sys.prefix + + +def guess_install_method() -> str: + """Guess if pip upgrade command should be used.""" + package_name = "ansible-lint" + pip = "" + if in_venv(): + _logger.debug("Found virtualenv, assuming `pip3 install` will work.") + pip = f"pip install --upgrade {package_name}" + elif __file__.startswith(os.path.expanduser("~/.local/lib")): + _logger.debug( + "Found --user installation, assuming `pip3 install --user` will work." + ) + pip = f"pip3 install --user --upgrade {package_name}" + + # By default we assume pip is not safe to be used + use_pip = False + try: + # Use pip to detect if is safe to use it to upgrade the package. + # We do imports here to for performance and reasons, and also in order + # to avoid errors if pip internals change. Also we want to avoid having + # to add pip as a dependency, so we make use of it only when present. 
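+        # In short: if pip's metadata exposes uninstallation paths for the
+        # package, it was installed with pip and recommending a pip upgrade is
+        # considered safe; otherwise no upgrade command is suggested.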
+ + # trick to avoid runtime warning from inside pip: _distutils_hack/__init__.py:33: UserWarning: Setuptools is replacing distutils. + with warnings.catch_warnings(record=True): + warnings.simplefilter("always") + # pylint: disable=import-outside-toplevel + from pip._internal.metadata import get_default_environment + from pip._internal.req.req_uninstall import uninstallation_paths + + dist = get_default_environment().get_distribution(package_name) + if dist: + logging.debug("Found %s dist", dist) + for _ in uninstallation_paths(dist): + use_pip = True + else: + logging.debug("Skipping %s as it is not installed.", package_name) + use_pip = False + # pylint: disable=broad-except + except Exception as exc: + # On Fedora 36, we got a AttributeError exception from pip that we want to avoid + logging.debug(exc) + use_pip = False + + # We only want to recommend pip for upgrade if it looks safe to do so. + return pip if use_pip else "" + + +def get_version_warning() -> str: + """Display warning if current version is outdated.""" + # 0.1dev1 is special fallback version + if __version__ == "0.1.dev1": # pragma: no cover + return "" + + msg = "" + data = {} + current_version = Version(__version__) + + if not os.path.exists(CACHE_DIR): # pragma: no cover + os.makedirs(CACHE_DIR) + cache_file = f"{CACHE_DIR}/latest.json" + refresh = True + if os.path.exists(cache_file): + age = time.time() - os.path.getmtime(cache_file) + if age < 24 * 60 * 60: + refresh = False + with open(cache_file, encoding="utf-8") as f: + data = json.load(f) + + if refresh or not data: + release_url = ( + "https://api.github.com/repos/ansible/ansible-lint/releases/latest" + ) + try: + with urllib.request.urlopen(release_url) as url: + data = json.load(url) + with open(cache_file, "w", encoding="utf-8") as f: + json.dump(data, f) + except (URLError, HTTPError) as exc: # pragma: no cover + _logger.debug( + "Unable to fetch latest version from %s due to: %s", release_url, exc + ) + return "" + + html_url = data["html_url"] + new_version = Version(data["tag_name"][1:]) # removing v prefix from tag + + if current_version > new_version: + msg = "[dim]You are using a pre-release version of ansible-lint.[/]" + elif current_version < new_version: + msg = f"""[warning]A new release of ansible-lint is available: [red]{current_version}[/] → [green][link={html_url}]{new_version}[/][/][/]""" + + pip = guess_install_method() + if pip: + msg += f" Upgrade by running: [info]{pip}[/]" + + return msg diff --git a/src/ansiblelint/constants.py b/src/ansiblelint/constants.py new file mode 100644 index 0000000..0fec8fd --- /dev/null +++ b/src/ansiblelint/constants.py @@ -0,0 +1,168 @@ +"""Constants used by AnsibleLint.""" +import os.path +from enum import Enum +from typing import Literal + +DEFAULT_RULESDIR = os.path.join(os.path.dirname(__file__), "rules") +CUSTOM_RULESDIR_ENVVAR = "ANSIBLE_LINT_CUSTOM_RULESDIR" +RULE_DOC_URL = "https://ansible-lint.readthedocs.io/rules/" + +SUCCESS_RC = 0 +VIOLATIONS_FOUND_RC = 2 +INVALID_CONFIG_RC = 3 +LOCK_TIMEOUT_RC = 4 +EXIT_CONTROL_C_RC = 130 + +# Minimal version of Ansible we support for runtime +ANSIBLE_MIN_VERSION = "2.12" + +ANSIBLE_MOCKED_MODULE = """\ +# This is a mocked Ansible module generated by ansible-lint +from ansible.module_utils.basic import AnsibleModule + +DOCUMENTATION = ''' +module: {name} + +short_description: Mocked +version_added: "1.0.0" +description: Mocked + +author: + - ansible-lint (@nobody) +''' +EXAMPLES = '''mocked''' +RETURN = '''mocked''' + + +def main(): + result = dict( + 
changed=False, + original_message='', + message='') + + module = AnsibleModule( + argument_spec=dict(), + supports_check_mode=True, + ) + module.exit_json(**result) + + +if __name__ == "__main__": + main() +""" + +FileType = Literal[ + "playbook", + "rulebook", + "meta", # role meta + "meta-runtime", + "tasks", # includes pre_tasks, post_tasks + "handlers", # very similar to tasks but with some specifics + # https://docs.ansible.com/ansible/latest/galaxy/user_guide.html#installing-roles-and-collections-from-the-same-requirements-yml-file + "requirements", + "role", # that is a folder! + "yaml", # generic yaml file, previously reported as unknown file type + "ansible-lint-config", + "", # unknown file type +] + + +# Aliases for deprecated tags/ids and their newer names +RENAMED_TAGS = { + "102": "no-jinja-when", + "104": "deprecated-bare-vars", + "105": "deprecated-module", + "106": "role-name", + "202": "risky-octal", + "203": "no-tabs", + "205": "playbook-extension", + "206": "jinja[spacing]", + "207": "jinja[invalid]", + "208": "risky-file-permissions", + "301": "no-changed-when", + "302": "deprecated-command-syntax", + "303": "command-instead-of-module", + "304": "inline-env-var", + "305": "command-instead-of-shell", + "306": "risky-shell-pipe", + "401": "latest[git]", + "402": "latest[hg]", + "403": "package-latest", + "404": "no-relative-paths", + "501": "partial-become", + "502": "unnamed-task", + "503": "no-handler", + "504": "deprecated-local-action", + "505": "missing-import", + "601": "literal-compare", + "602": "empty-string-compare", + "701": "meta-no-info", + "702": "meta-no-tags", + "703": "meta-incorrect", + "704": "meta-video-links", + "911": "syntax-check", + "var-spacing": "jinja[spacing]", + "unnamed-task": "name[missing]", + "git-latest": "latest[git]", + "hg-latest": "latest[hg]", + "no-jinja-nesting": "jinja[invalid]", + "no-loop-var-prefix": "loop-var-prefix", + "fqcn-builtins": "fqcn[action-core]", +} + +PLAYBOOK_TASK_KEYWORDS = [ + "tasks", + "handlers", + "pre_tasks", + "post_tasks", +] +NESTED_TASK_KEYS = [ + "block", + "always", + "rescue", +] + +# Keys that are used internally when parsing YAML/JSON files +SKIPPED_RULES_KEY = "__skipped_rules__" +LINE_NUMBER_KEY = "__line__" +FILENAME_KEY = "__file__" +ANNOTATION_KEYS = [FILENAME_KEY, LINE_NUMBER_KEY, SKIPPED_RULES_KEY] + +INCLUSION_ACTION_NAMES = { + "include", + "include_tasks", + "import_playbook", + "import_tasks", + "ansible.builtin.include", + "ansible.builtin.include_tasks", + "ansible.builtin.import_playbook", + "ansible.builtin.import_tasks", +} + +ROLE_IMPORT_ACTION_NAMES = { + "ansible.builtin.import_role", + "ansible.builtin.include_role", + "ansible.legacy.import_role", + "ansible.legacy.include_role", + "import_role", + "include_role", +} + +# Newer versions of git might fail to run when different file ownership is +# found of repo. One example is on GHA runners executing containerized +# reusable actions, where the mounted volume might have different owner. 
+# +# https://github.com/ansible/ansible-lint-action/issues/138 +GIT_CMD = ["git", "-c", f"safe.directory={os.getcwd()}"] + + +class States(Enum): + """States used are used as sentinel values in various places.""" + + NOT_LOADED = "File not loaded" + LOAD_FAILED = "File failed to load" + UNKNOWN_DATA = "Unknown data" + + def __bool__(self) -> bool: + """Ensure all states evaluate as False as booleans.""" + return False diff --git a/src/ansiblelint/data/profiles.yml b/src/ansiblelint/data/profiles.yml new file mode 100644 index 0000000..8de92fd --- /dev/null +++ b/src/ansiblelint/data/profiles.yml @@ -0,0 +1,121 @@ +--- +# Do not change sorting order of the primary keys as they also represent how +# progressive the profiles are, each one extending the one before it. +min: + description: > + The `min` profile ensures that Ansible can load content. + Rules in this profile are mandatory because they prevent fatal errors. + You can add files to the exclude list or provide dependencies to load the + correct files. + extends: null + rules: + internal-error: + load-failure: + parser-error: + syntax-check: +basic: + description: > + The `basic` profile prevents common coding issues and enforces standard styles and formatting. + extends: min + rules: + command-instead-of-module: + command-instead-of-shell: + deprecated-bare-vars: + deprecated-command-syntax: + deprecated-local-action: + deprecated-module: + inline-env-var: + key-order: + literal-compare: + jinja: + no-jinja-when: + no-tabs: + partial-become: + playbook-extension: + role-name: + schema: # can cover lots of rules, but not really be able to give best error messages + name: + var-naming: + yaml: + skip_list: # just because we enable them in following profiles + - name[template] + - name[casing] +moderate: + description: > + The `moderate` profile ensures that content adheres to best practices for making content easier to read and maintain. + extends: basic + rules: + name[template]: + name[imperative]: + url: https://github.com/ansible/ansible-lint/issues/2170 + name[casing]: + no-free-form: # schema-related + url: https://github.com/ansible/ansible-lint/issues/2117 + spell-var-name: + url: https://github.com/ansible/ansible-lint/issues/2168 +safety: + description: > + The `safety` profile avoids module calls that can have non-determinant outcomes or security concerns. + extends: moderate + rules: + avoid-implicit: + latest: + package-latest: + risky-file-permissions: + risky-octal: + risky-shell-pipe: +shared: + description: > + The `shared` profile ensures that content follows best practices for packaging and publishing. + This profile is intended for content creators who want to make Ansible + playbooks, roles, or collections available from + [galaxy.ansible.com](https://galaxy.ansible.com/), + [automation-hub](https://console.redhat.com/ansible/automation-hub), + or a private instance. 
+ extends: safety + rules: + galaxy: # <-- applies to both galaxy and automation-hub + ignore-errors: + layout: + url: https://github.com/ansible/ansible-lint/issues/1900 + meta-incorrect: + meta-no-info: + meta-no-tags: + meta-video-links: + meta-version: + url: https://github.com/ansible/ansible-lint/issues/2103 + meta-runtime: + url: https://github.com/ansible/ansible-lint/issues/2102 + no-changed-when: + no-changelog: + url: https://github.com/ansible/ansible-lint/issues/2101 + no-handler: + no-relative-paths: + max-block-depth: + url: https://github.com/ansible/ansible-lint/issues/2173 + max-tasks: + url: https://github.com/ansible/ansible-lint/issues/2172 + unsafe-loop: + # unsafe-loop[prefix] (currently named "no-var-prefix") + # [unsafe-loop[var-prefix|iterator]] + url: https://github.com/ansible/ansible-lint/issues/2038 +production: + description: > + The `production` profile ensures that content meets requirements for + inclusion in [Ansible Automation Platform (AAP)](https://www.redhat.com/en/technologies/management/ansible) + as validated or certified content. + extends: shared + rules: + avoid-dot-notation: + url: https://github.com/ansible/ansible-lint/issues/2174 + disallowed-ignore: # [sanity] + url: https://github.com/ansible/ansible-lint/issues/2121 + fqcn: + import-task-no-when: + url: https://github.com/ansible/ansible-lint/issues/2219 + meta-no-dependencies: + url: https://github.com/ansible/ansible-lint/issues/2159 + single-entry-point: + url: https://github.com/ansible/ansible-lint/issues/2242 + use-loop: + url: https://github.com/ansible/ansible-lint/issues/2204 diff --git a/src/ansiblelint/errors.py b/src/ansiblelint/errors.py new file mode 100644 index 0000000..c5a1895 --- /dev/null +++ b/src/ansiblelint/errors.py @@ -0,0 +1,144 @@ +"""Exceptions and error representations.""" +from __future__ import annotations + +import functools +from typing import Any + +from ansiblelint._internal.rules import BaseRule, RuntimeErrorRule +from ansiblelint.config import options +from ansiblelint.file_utils import Lintable, normpath + + +# pylint: disable=too-many-instance-attributes +@functools.total_ordering +class MatchError(ValueError): + """Rule violation detected during linting. + + It can be raised as Exception but also just added to the list of found + rules violations. + + Note that line argument is not considered when building hash of an + instance. + """ + + tag = "" + + # IMPORTANT: any additional comparison protocol methods must return + # IMPORTANT: `NotImplemented` singleton to allow the check to use the + # IMPORTANT: other object's fallbacks. + # Ref: https://docs.python.org/3/reference/datamodel.html#object.__lt__ + + # pylint: disable=too-many-arguments + def __init__( + self, + message: str | None = None, + # most linters report use (1,1) base, including yamllint and flake8 + # we should never report line 0 or column 0 in output. 
+        linenumber: int = 1,
+        column: int | None = None,
+        details: str = "",
+        filename: Lintable | None = None,
+        rule: BaseRule = RuntimeErrorRule(),
+        tag: str | None = None,  # optional fine-grained tag
+    ) -> None:
+        """Initialize a MatchError instance."""
+        super().__init__(message)
+
+        if rule.__class__ is RuntimeErrorRule and not message:
+            raise TypeError(
+                f"{self.__class__.__name__}() missing a "
+                "required argument: one of 'message' or 'rule'",
+            )
+
+        self.message = str(message or getattr(rule, "shortdesc", ""))
+
+        # Safety measure to ensure we do not end up with incorrect indexes
+        if linenumber == 0:  # pragma: no cover
+            raise RuntimeError(
+                "MatchError called incorrectly as line numbers start with 1"
+            )
+        if column == 0:  # pragma: no cover
+            raise RuntimeError(
+                "MatchError called incorrectly as column numbers start with 1"
+            )
+
+        self.linenumber = linenumber
+        self.column = column
+        self.details = details
+        self.filename = ""
+        if filename:
+            if isinstance(filename, Lintable):
+                self.lintable = filename
+                self.filename = normpath(str(filename.path))
+            else:
+                self.filename = normpath(filename)
+                self.lintable = Lintable(self.filename)
+        self.rule = rule
+        self.ignored = False  # If set, it will be displayed but not counted as a failure
+        # This can be used by rules that can report multiple error types, so
+        # we can still filter by them.
+        self.tag = tag or rule.id
+
+        # optional indicator on how this error was found
+        self.match_type: str | None = None
+        # for task matches, save the normalized task object (useful for transforms)
+        self.task: dict[str, Any] | None = None
+        # path to the problem area, like: [0,"pre_tasks",3] for [0].pre_tasks[3]
+        self.yaml_path: list[int | str] = []
+        # True when a transform has resolved this MatchError
+        self.fixed = False
+
+    @functools.cached_property
+    def level(self) -> str:
+        """Return the level of the rule: error, warning or notice."""
+        if {self.tag, self.rule.id, *self.rule.tags}.isdisjoint(options.warn_list):
+            return "error"
+        return "warning"
+
+    def __repr__(self) -> str:
+        """Return a MatchError instance representation."""
+        formatstr = "[{0}] ({1}) matched {2}:{3} {4}"
+        # note that `rule.id` can be int, str or even missing, as users
+        # can define their own custom rules.
+        _id = getattr(self.rule, "id", "000")
+
+        return formatstr.format(
+            _id, self.message, self.filename, self.linenumber, self.details
+        )
+
+    @property
+    def position(self) -> str:
+        """Return error positioning, with column number if available."""
+        if self.column:
+            return f"{self.linenumber}:{self.column}"
+        return str(self.linenumber)
+
+    @property
+    def _hash_key(self) -> Any:
+        # line attr is knowingly excluded, as dict is not hashable
+        return (
+            self.filename,
+            self.linenumber,
+            str(getattr(self.rule, "id", 0)),
+            self.message,
+            self.details,
+            # -1 is used here to force errors with no column to sort before
+            # all other errors.
+ -1 if self.column is None else self.column, + ) + + def __lt__(self, other: object) -> bool: + """Return whether the current object is less than the other.""" + if not isinstance(other, self.__class__): + return NotImplemented + return bool(self._hash_key < other._hash_key) + + def __hash__(self) -> int: + """Return a hash value of the MatchError instance.""" + return hash(self._hash_key) + + def __eq__(self, other: object) -> bool: + """Identify whether the other object represents the same rule match.""" + if not isinstance(other, self.__class__): + return NotImplemented + return self.__hash__() == other.__hash__() diff --git a/src/ansiblelint/file_utils.py b/src/ansiblelint/file_utils.py new file mode 100644 index 0000000..500dff7 --- /dev/null +++ b/src/ansiblelint/file_utils.py @@ -0,0 +1,523 @@ +"""Utility functions related to file operations.""" +from __future__ import annotations + +import copy +import logging +import os +import pathlib +import subprocess +import sys +from argparse import Namespace +from collections import OrderedDict, defaultdict +from contextlib import contextmanager +from pathlib import Path +from tempfile import NamedTemporaryFile +from typing import TYPE_CHECKING, Any, Iterator, cast + +import wcmatch.pathlib +from wcmatch.wcmatch import RECURSIVE, WcMatch +from yaml.error import YAMLError + +from ansiblelint.config import BASE_KINDS, options +from ansiblelint.constants import GIT_CMD, FileType, States + +if TYPE_CHECKING: + # https://github.com/PyCQA/pylint/issues/3979 + BasePathLike = os.PathLike[Any] # pylint: disable=unsubscriptable-object +else: + BasePathLike = os.PathLike + +_logger = logging.getLogger(__package__) + + +def abspath(path: str, base_dir: str) -> str: + """Make relative path absolute relative to given directory. + + Args: + path (str): the path to make absolute + base_dir (str): the directory from which make \ + relative paths absolute + """ + if not os.path.isabs(path): + # Don't use abspath as it assumes path is relative to cwd. + # We want it relative to base_dir. + path = os.path.join(base_dir, path) + + return os.path.normpath(path) + + +def normpath(path: str | BasePathLike) -> str: + """ + Normalize a path in order to provide a more consistent output. + + Currently it generates a relative path but in the future we may want to + make this user configurable. + """ + # prevent possible ValueError with relpath(), when input is an empty string + if not path: + path = "." + # conversion to string in order to allow receiving non string objects + relpath = os.path.relpath(str(path)) + path_absolute = os.path.abspath(str(path)) + if path_absolute.startswith(os.getcwd()): + return relpath + if path_absolute.startswith(os.path.expanduser("~")): + return path_absolute.replace(os.path.expanduser("~"), "~") + # we avoid returning relative paths that end-up at root level + if path_absolute in relpath: + return path_absolute + if relpath.startswith("../"): + return path_absolute + return relpath + + +# That is needed for compatibility with py38, later was added to Path class +def is_relative_to(path: Path, *other: Any) -> bool: + """Return True if the path is relative to another path or False.""" + try: + path.resolve().absolute().relative_to(*other) + return True + except ValueError: + return False + + +def normpath_path(path: str | BasePathLike) -> Path: + """Normalize a path in order to provide a more consistent output. + + - Any symlinks are resolved. + - Any paths outside the CWD are resolved to their absolute path. 
+    - Any absolute path within the current user's home directory is compressed to
+      make use of '~', so it is easier to read and more portable.
+    """
+    if not isinstance(path, Path):
+        path = Path(path)
+
+    is_relative = is_relative_to(path, path.cwd())
+    path = path.resolve()
+    if is_relative:
+        path = path.relative_to(path.cwd())
+
+    # Compress any absolute path within current user home directory
+    if path.is_absolute():
+        home = Path.home()
+        if is_relative_to(path, home):
+            path = Path("~") / path.relative_to(home)
+
+    return path
+
+
+@contextmanager
+def cwd(path: str | BasePathLike) -> Iterator[None]:
+    """Context manager for temporarily changing the current working directory."""
+    old_pwd = os.getcwd()
+    os.chdir(path)
+    try:
+        yield
+    finally:
+        os.chdir(old_pwd)
+
+
+def expand_path_vars(path: str) -> str:
+    """Expand the environment or ~ variables in a path string."""
+    # It may be possible for this function to be called with a Path object
+    path = str(path).strip()
+    path = os.path.expanduser(path)
+    path = os.path.expandvars(path)
+    return path
+
+
+def expand_paths_vars(paths: list[str]) -> list[str]:
+    """Expand the environment or ~ variables in a list."""
+    paths = [expand_path_vars(p) for p in paths]
+    return paths
+
+
+def kind_from_path(path: Path, base: bool = False) -> FileType:
+    """Determine the file kind based on its name.
+
+    When called with base=True, it will return the base file type instead
+    of the explicit one. For example, it is expected to return 'text/yaml' for
+    any YAML files.
+    """
+    # pathlib.Path.match patterns are very limited, they do not support *a*.yml
+    # glob.glob supports **/foo.yml but not multiple extensions
+    pathex = wcmatch.pathlib.PurePath(str(path.absolute().resolve()))
+    kinds = options.kinds if not base else BASE_KINDS
+    for entry in kinds:
+        for k, v in entry.items():
+            if pathex.globmatch(
+                v,
+                flags=(
+                    wcmatch.pathlib.GLOBSTAR
+                    | wcmatch.pathlib.BRACE
+                    | wcmatch.pathlib.DOTGLOB
+                ),
+            ):
+                return str(k)  # type: ignore
+
+    if base:
+        # Unknown base file type is default
+        return ""
+
+    if path.is_dir():
+        return "role"
+
+    if str(path) == "/dev/stdin":
+        return "playbook"
+
+    # Unknown file types report an empty string (evaluated as False)
+    return ""
+
+
+# pylint: disable=too-many-instance-attributes
+class Lintable:
+    """Defines a file/folder that can be linted.
+
+    Providing file content when creating the object allows the creation of
+    in-memory instances that do not need files to be present on disk.
+
+    When symlinks are given, they will always be resolved to their target.
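+
+    A minimal usage sketch (editorial example; the path below is hypothetical
+    and does not need to exist on disk)::
+
+        lintable = Lintable("playbooks/site.yml", kind="playbook")
+        assert lintable.kind == "playbook"
+        assert not lintable.updated  # nothing has modified its content yet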
+ """ + + def __init__( + self, + name: str | Path, + content: str | None = None, + kind: FileType | None = None, + base_kind: str = "", + ): + """Create a Lintable instance.""" + self.dir: str = "" + self.kind: FileType | None = None + self.stop_processing = False # Set to stop other rules from running + self._data: Any = States.NOT_LOADED + self.line_skips: dict[int, set[str]] = defaultdict(set) + self.exc: Exception | None = None # Stores data loading exceptions + + if isinstance(name, str): + name = Path(name) + is_relative = is_relative_to(name, str(name.cwd())) + name = name.resolve() + if is_relative: + name = name.relative_to(name.cwd()) + name = normpath_path(name) + self.path = name + # Filename is effective file on disk, for stdin is a namedtempfile + self.name = self.filename = str(name) + + self._content = self._original_content = content + self.updated = False + + # if the lintable is part of a role, we save role folder name + self.role = "" + parts = self.path.parent.parts + if "roles" in parts: + role = self.path + while role.parent.name != "roles" and role.name: + role = role.parent + if role.exists(): + self.role = role.name + + if str(self.path) in ["/dev/stdin", "-"]: + # pylint: disable=consider-using-with + self.file = NamedTemporaryFile(mode="w+", suffix="playbook.yml") + self.filename = self.file.name + self._content = sys.stdin.read() + self.file.write(self._content) + self.file.flush() + self.path = Path(self.file.name) + self.name = "stdin" + self.kind = "playbook" + self.dir = "/" + else: + self.kind = kind or kind_from_path(self.path) + # We store absolute directory in dir + if not self.dir: + if self.kind == "role": + self.dir = str(self.path.resolve()) + else: + self.dir = str(self.path.parent.resolve()) + + # determine base file kind (yaml, xml, ini, ...) + self.base_kind = base_kind or kind_from_path(self.path, base=True) + self.abspath = self.path.expanduser().absolute() + + if self.kind == "yaml": + self.data # pylint: disable=pointless-statement + + def _guess_kind(self) -> None: + if self.kind == "yaml": + if isinstance(self.data, list) and "hosts" in self.data[0]: + if "rules" not in self.data[0]: + self.kind = "playbook" + else: + self.kind = "rulebook" + # we we failed to guess the more specific kind, we warn user + if self.kind == "yaml": + _logger.debug( + "Passed '%s' positional argument was identified as generic '%s' file kind.", + self.name, + self.kind, + ) + + def __getitem__(self, key: Any) -> Any: + """Provide compatibility subscriptable support.""" + if key == "path": + return str(self.path) + if key == "type": + return str(self.kind) + raise NotImplementedError() + + def get(self, key: Any, default: Any = None) -> Any: + """Provide compatibility subscriptable support.""" + try: + return self[key] + except NotImplementedError: + return default + + def _populate_content_cache_from_disk(self) -> None: + # Can raise UnicodeDecodeError + try: + self._content = self.path.expanduser().resolve().read_text(encoding="utf-8") + except FileNotFoundError as ex: + if vars(options).get("progressive"): + self._content = "" + else: + raise ex + if self._original_content is None: + self._original_content = self._content + + @property + def content(self) -> str: + """Retrieve file content, from internal cache or disk.""" + if self._content is None: + self._populate_content_cache_from_disk() + return cast(str, self._content) + + @content.setter + def content(self, value: str) -> None: + """Update ``content`` and calculate ``updated``. 
+ + To calculate ``updated`` this will read the file from disk if the cache + has not already been populated. + """ + if not isinstance(value, str): + raise TypeError(f"Expected str but got {type(value)}") + if self._original_content is None: + if self._content is not None: + self._original_content = self._content + elif self.path.exists(): + self._populate_content_cache_from_disk() + else: + # new file + self._original_content = "" + self.updated = self._original_content != value + self._content = value + + @content.deleter + def content(self) -> None: + """Reset the internal content cache.""" + self._content = None + + def write(self, force: bool = False) -> None: + """Write the value of ``Lintable.content`` to disk. + + This only writes to disk if the content has been updated (``Lintable.updated``). + For example, you can update the content, and then write it to disk like this: + + .. code:: python + + lintable.content = new_content + lintable.write() + + Use ``force=True`` when you want to force a content rewrite even if the + content has not changed. For example: + + .. code:: python + + lintable.write(force=True) + """ + if not force and not self.updated: + # No changes to write. + return + self.path.expanduser().resolve().write_text( + self._content or "", encoding="utf-8" + ) + + def __hash__(self) -> int: + """Return a hash value of the lintables.""" + return hash((self.name, self.kind, self.abspath)) + + def __eq__(self, other: object) -> bool: + """Identify whether the other object represents the same rule match.""" + if isinstance(other, Lintable): + return bool(self.name == other.name and self.kind == other.kind) + return False + + def __repr__(self) -> str: + """Return user friendly representation of a lintable.""" + return f"{self.name} ({self.kind})" + + @property + def data(self) -> Any: + """Return loaded data representation for current file, if possible.""" + if self._data == States.NOT_LOADED: + if self.path.is_dir(): + self._data = None + return self._data + try: + if str(self.base_kind) == "text/yaml": + from ansiblelint.utils import ( # pylint: disable=import-outside-toplevel + parse_yaml_linenumbers, + ) + + self._data = parse_yaml_linenumbers(self) + # now that _data is not empty, we can try guessing if playbook or rulebook + # it has to be done before append_skipped_rules() call as it's relying + # on self.kind. + if self.kind == "yaml": + self._guess_kind() + # Lazy import to avoid delays and cyclic-imports + if "append_skipped_rules" not in globals(): + # pylint: disable=import-outside-toplevel + from ansiblelint.skip_utils import append_skipped_rules + + self._data = append_skipped_rules(self._data, self) + else: + logging.debug( + "data set to None for %s due to being of %s kind.", + self.path, + self.base_kind, + ) + self._data = States.UNKNOWN_DATA + + except (RuntimeError, FileNotFoundError, YAMLError) as exc: + self._data = States.LOAD_FAILED + self.exc = exc + return self._data + + +# pylint: disable=redefined-outer-name +def discover_lintables(options: Namespace) -> dict[str, Any]: + """Find all files that we know how to lint. + + Return format is normalized, relative for stuff below cwd, ~/ for content + under current user and absolute for everything else. 
+ """ + # git is preferred as it also considers .gitignore + git_command_present = [ + *GIT_CMD, + "ls-files", + "--cached", + "--others", + "--exclude-standard", + "-z", + ] + git_command_absent = [*GIT_CMD, "ls-files", "--deleted", "-z"] + out = None + + try: + out_present = subprocess.check_output( + git_command_present, stderr=subprocess.STDOUT, text=True + ).split("\x00")[:-1] + _logger.info( + "Discovered files to lint using: %s", " ".join(git_command_present) + ) + + out_absent = subprocess.check_output( + git_command_absent, stderr=subprocess.STDOUT, text=True + ).split("\x00")[:-1] + _logger.info("Excluded removed files using: %s", " ".join(git_command_absent)) + + out = set(out_present) - set(out_absent) + except subprocess.CalledProcessError as exc: + if not (exc.returncode == 128 and "fatal: not a git repository" in exc.output): + _logger.warning( + "Failed to discover lintable files using git: %s", + exc.output.rstrip("\n"), + ) + except FileNotFoundError as exc: + if options.verbosity: + _logger.warning("Failed to locate command: %s", exc) + + if out is None: + exclude_pattern = "|".join(str(x) for x in options.exclude_paths) + _logger.info("Looking up for files, excluding %s ...", exclude_pattern) + # remove './' prefix from output of WcMatch + out = { + strip_dotslash_prefix(fname) + for fname in WcMatch( + ".", exclude_pattern=exclude_pattern, flags=RECURSIVE, limit=256 + ).match() + } + + return OrderedDict.fromkeys(sorted(out)) + + +def strip_dotslash_prefix(fname: str) -> str: + """Remove ./ leading from filenames.""" + return fname[2:] if fname.startswith("./") else fname + + +def guess_project_dir(config_file: str | None) -> str: + """Return detected project dir or current working directory.""" + path = None + if config_file is not None and config_file != "/dev/null": + target = pathlib.Path(config_file) + if target.exists(): + # for config inside .config, we return the parent dir as project dir + cfg_path = target.parent + if cfg_path.parts[-1] == ".config": + path = str(cfg_path.parent.absolute()) + else: + path = str(cfg_path.absolute()) + + if path is None: + try: + result = subprocess.run( + [*GIT_CMD, "rev-parse", "--show-toplevel"], + capture_output=True, + text=True, + check=True, + ) + + path = result.stdout.splitlines()[0] + except subprocess.CalledProcessError as exc: + if not ( + exc.returncode == 128 and "fatal: not a git repository" in exc.stderr + ): + _logger.warning( + "Failed to guess project directory using git: %s", + exc.stderr.rstrip("\n"), + ) + except FileNotFoundError as exc: + _logger.warning("Failed to locate command: %s", exc) + + if path is None: + path = os.getcwd() + + _logger.info( + "Guessed %s as project root directory", + path, + ) + + return path + + +def expand_dirs_in_lintables(lintables: set[Lintable]) -> None: + """Return all recognized lintables within given directory.""" + should_expand = False + + for item in lintables: + if item.path.is_dir(): + should_expand = True + break + + if should_expand: + # this relies on git and we do not want to call unless needed + all_files = discover_lintables(options) + + for item in copy.copy(lintables): + if item.path.is_dir(): + for filename in all_files: + if filename.startswith(str(item.path)): + lintables.add(Lintable(filename)) diff --git a/src/ansiblelint/formatters/__init__.py b/src/ansiblelint/formatters/__init__.py new file mode 100644 index 0000000..2914f6d --- /dev/null +++ b/src/ansiblelint/formatters/__init__.py @@ -0,0 +1,315 @@ +"""Output formatters.""" +from __future__ import 
annotations + +import hashlib +import json +import os +from pathlib import Path +from typing import TYPE_CHECKING, Any, Dict, Generic, List, Tuple, TypeVar, Union + +import rich + +from ansiblelint.config import options +from ansiblelint.version import __version__ + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + +T = TypeVar("T", bound="BaseFormatter") # type: ignore + + +class BaseFormatter(Generic[T]): + """Formatter of ansible-lint output. + + Base class for output formatters. + + Args: + base_dir (str|Path): reference directory against which display relative path. + display_relative_path (bool): whether to show path as relative or absolute + """ + + def __init__(self, base_dir: str | Path, display_relative_path: bool) -> None: + """Initialize a BaseFormatter instance.""" + if isinstance(base_dir, str): + base_dir = Path(base_dir) + if base_dir: # can be None + base_dir = base_dir.absolute() + + self._base_dir = base_dir if display_relative_path else None + + def _format_path(self, path: str | Path) -> str | Path: + if not self._base_dir or not path: + return path + # Use os.path.relpath 'cause Path.relative_to() misbehaves + return os.path.relpath(path, start=self._base_dir) + + def format(self, match: MatchError) -> str: + """Format a match error.""" + return str(match) + + @staticmethod + def escape(text: str) -> str: + """Escapes a string to avoid processing it as markup.""" + return rich.markup.escape(text) + + +class Formatter(BaseFormatter): # type: ignore + """Default output formatter of ansible-lint.""" + + def format(self, match: MatchError) -> str: + _id = getattr(match.rule, "id", "000") + result = f"[{match.level}][bold][link={match.rule.url}]{self.escape(match.tag)}[/link][/][/][dim]:[/] [{match.level}]{self.escape(match.message)}[/]" + if match.level != "error": + result += f" [dim][{match.level}]({match.level})[/][/]" + if match.ignored: + result += " [dim]# ignored[/]" + result += ( + "\n" + f"[filename]{self._format_path(match.filename or '')}[/]:{match.position}" + ) + if match.details: + result += f" [dim]{self.escape(str(match.details))}[/]" + result += "\n" + return result + + +class QuietFormatter(BaseFormatter[Any]): + """Brief output formatter for ansible-lint.""" + + def format(self, match: MatchError) -> str: + return ( + f"[{match.level}]{match.rule.id}[/] " + f"[filename]{self._format_path(match.filename or '')}[/]:{match.position}" + ) + + +class ParseableFormatter(BaseFormatter[Any]): + """Parseable uses PEP8 compatible format.""" + + def format(self, match: MatchError) -> str: + result = ( + f"[filename]{self._format_path(match.filename or '')}[/][dim]:{match.position}:[/] " + f"[{match.level}][bold]{self.escape(match.tag)}[/bold]" + f"{ f': {match.message}' if not options.quiet else '' }[/]" + ) + if match.level != "error": + result += f" [dim][{match.level}]({match.level})[/][/]" + + return result + + +class AnnotationsFormatter(BaseFormatter): # type: ignore + # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-a-warning-message + """Formatter for emitting violations as GitHub Workflow Commands. + + These commands trigger the GHA Workflow runners platform to post violations + in a form of GitHub Checks API annotations that appear rendered in pull- + request files view. 
+ + ::debug file={name},line={line},col={col},severity={severity}::{message} + ::warning file={name},line={line},col={col},severity={severity}::{message} + ::error file={name},line={line},col={col},severity={severity}::{message} + + Supported levels: debug, warning, error + """ + + def format(self, match: MatchError) -> str: + """Prepare a match instance for reporting as a GitHub Actions annotation.""" + file_path = self._format_path(match.filename or "") + line_num = match.linenumber + severity = match.rule.severity + violation_details = self.escape(match.message) + if match.column: + col = f",col={match.column}" + else: + col = "" + return ( + f"::{match.level} file={file_path},line={line_num}{col},severity={severity},title={match.tag}" + f"::{violation_details}" + ) + + +class CodeclimateJSONFormatter(BaseFormatter[Any]): + """Formatter for emitting violations in Codeclimate JSON report format. + + The formatter expects a list of MatchError objects and returns a JSON formatted string. + The spec for the codeclimate report can be found here: + https://github.com/codeclimate/platform/blob/master/spec/analyzers/SPEC.md#user-content-data-types + """ + + def format_result(self, matches: list[MatchError]) -> str: + """Format a list of match errors as a JSON string.""" + if not isinstance(matches, list): + raise RuntimeError( + f"The {self.__class__} was expecting a list of MatchError." + ) + + result = [] + for match in matches: + issue: dict[str, Any] = {} + issue["type"] = "issue" + issue["check_name"] = match.tag or match.rule.id # rule-id[subrule-id] + issue["categories"] = match.rule.tags + if match.rule.url: + # https://github.com/codeclimate/platform/issues/68 + issue["url"] = match.rule.url + issue["severity"] = self._remap_severity(match) + # level is not part of CodeClimate specification, but there is + # no other way to expose that info. We recommend switching to + # SARIF format which is better suited for interoperability. + issue["level"] = match.level + issue["description"] = self.escape(str(match.message)) + issue["fingerprint"] = hashlib.sha256( + repr(match).encode("utf-8") + ).hexdigest() + issue["location"] = {} + issue["location"]["path"] = self._format_path(match.filename or "") + if match.column: + issue["location"]["positions"] = {} + issue["location"]["positions"]["begin"] = {} + issue["location"]["positions"]["begin"]["line"] = match.linenumber + issue["location"]["positions"]["begin"]["column"] = match.column + else: + issue["location"]["lines"] = {} + issue["location"]["lines"]["begin"] = match.linenumber + if match.details: + issue["content"] = {} + issue["content"]["body"] = match.details + # Append issue to result list + result.append(issue) + + return json.dumps(result) + + @staticmethod + def _remap_severity(match: MatchError) -> str: + severity = match.rule.severity + + if severity in ["LOW"]: + return "minor" + if severity in ["MEDIUM"]: + return "major" + if severity in ["HIGH"]: + return "critical" + if severity in ["VERY_HIGH"]: + return "blocker" + # VERY_LOW, INFO or anything else + return "info" + + +class SarifFormatter(BaseFormatter[Any]): + """Formatter for emitting violations in SARIF report format. 
+ + The spec of SARIF can be found here: + https://docs.oasis-open.org/sarif/sarif/v2.1.0/ + """ + + BASE_URI_ID = "SRCROOT" + TOOL_NAME = "ansible-lint" + TOOL_URL = "https://github.com/ansible/ansible-lint" + SARIF_SCHEMA_VERSION = "2.1.0" + SARIF_SCHEMA = ( + "https://schemastore.azurewebsites.net/schemas/json/sarif-2.1.0-rtm.5.json" + ) + + def format_result(self, matches: list[MatchError]) -> str: + """Format a list of match errors as a JSON string.""" + if not isinstance(matches, list): + raise RuntimeError( + f"The {self.__class__} was expecting a list of MatchError." + ) + + root_path = Path(str(self._base_dir)).as_uri() + root_path = root_path + "/" if not root_path.endswith("/") else root_path + rules, results = self._extract_results(matches) + + tool = { + "driver": { + "name": self.TOOL_NAME, + "version": __version__, + "informationUri": self.TOOL_URL, + "rules": rules, + } + } + + runs = [ + { + "tool": tool, + "columnKind": "utf16CodeUnits", + "results": results, + "originalUriBaseIds": { + self.BASE_URI_ID: {"uri": root_path}, + }, + } + ] + + report = { + "$schema": self.SARIF_SCHEMA, + "version": self.SARIF_SCHEMA_VERSION, + "runs": runs, + } + + return json.dumps( + report, default=lambda o: o.__dict__, sort_keys=False, indent=2 + ) + + def _extract_results( + self, matches: list[MatchError] + ) -> tuple[list[Any], list[Any]]: + rules = {} + results = [] + for match in matches: + if match.tag not in rules: + rules[match.tag] = self._to_sarif_rule(match) + results.append(self._to_sarif_result(match)) + return list(rules.values()), results + + def _to_sarif_rule(self, match: MatchError) -> dict[str, Any]: + rule: dict[str, Any] = { + "id": match.tag, + "name": match.tag, + "shortDescription": { + "text": str(match.message), + }, + "defaultConfiguration": { + "level": self._to_sarif_level(match), + }, + "help": { + "text": str(match.rule.description), + }, + "helpUri": match.rule.url, + "properties": {"tags": match.rule.tags}, + } + if match.rule.link: + rule["helpUri"] = match.rule.link + return rule + + def _to_sarif_result(self, match: MatchError) -> dict[str, Any]: + result: dict[str, Any] = { + "ruleId": match.tag, + "message": { + "text": str(match.message), + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": self._format_path(match.filename or ""), + "uriBaseId": self.BASE_URI_ID, + }, + "region": { + "startLine": match.linenumber, + }, + }, + }, + ], + } + if match.column: + result["locations"][0]["physicalLocation"]["region"][ + "startColumn" + ] = match.column + return result + + @staticmethod + def _to_sarif_level(match: MatchError) -> str: + # sarif accepts only 4 levels: error, warning, note, none + return match.level diff --git a/src/ansiblelint/generate_docs.py b/src/ansiblelint/generate_docs.py new file mode 100644 index 0000000..2e518d1 --- /dev/null +++ b/src/ansiblelint/generate_docs.py @@ -0,0 +1,176 @@ +"""Utils to generate rules documentation.""" +import logging +from pathlib import Path +from typing import Iterable + +from rich import box +from rich.console import RenderableType + +# Remove this compatibility try-catch block once we drop support for rich < 10.7.0 +try: + from rich.console import group +except ImportError: + from rich.console import render_group as group # type: ignore + +from rich.markdown import Markdown +from rich.table import Table + +from ansiblelint.config import PROFILES +from ansiblelint.constants import RULE_DOC_URL +from ansiblelint.rules import RulesCollection + +DOC_HEADER = """ +# Default 
Rules + +(lint_default_rules)= + +Below you can see the list of default rules Ansible Lint use to evaluate playbooks and roles: + +""" + +_logger = logging.getLogger(__name__) + + +def rules_as_docs(rules: RulesCollection) -> str: + """Dump documentation files for all rules, returns only confirmation message. + + That is internally used for building documentation and the API can change + at any time. + """ + result = "" + dump_path = Path(".") / "docs" / "rules" + if not dump_path.exists(): + raise RuntimeError(f"Failed to find {dump_path} folder for dumping rules.") + + with open(dump_path / ".." / "profiles.md", "w", encoding="utf-8") as f: + f.write(profiles_as_md(header=True, docs_url="rules/")) + + for rule in rules.alphabetical(): + result = "" + with open(dump_path / f"{rule.id}.md", "w", encoding="utf-8") as f: + # because title == rule.id we get the desired labels for free + # and we do not have to insert `(target_header)=` + title = f"{rule.id}" + + if rule.help: + if not rule.help.startswith(f"# {rule.id}"): + raise RuntimeError( + f"Rule {rule.__class__} markdown help does not start with `# {rule.id}` header.\n{rule.help}" + ) + result = result[1:] + result += f"{rule.help}" + else: + description = rule.description + if rule.link: + description += f" [more]({rule.link})" + + result += f"# {title}\n\n**{rule.shortdesc}**\n\n{description}" + result = result.strip() + "\n" + f.write(result) + + return "All markdown files for rules were dumped!" + + +def rules_as_str(rules: RulesCollection) -> RenderableType: + """Return rules as string.""" + table = Table(show_header=False, header_style="title", box=box.SIMPLE) + for rule in rules.alphabetical(): + if rule.tags: + tag = f"[dim] ({', '.join(rule.tags)})[/dim]" + else: + tag = "" + table.add_row( + f"[link={RULE_DOC_URL}{rule.id}/]{rule.id}[/link]", rule.shortdesc + tag + ) + return table + + +def rules_as_md(rules: RulesCollection) -> str: + """Return md documentation for a list of rules.""" + result = DOC_HEADER + + for rule in rules.alphabetical(): + # because title == rule.id we get the desired labels for free + # and we do not have to insert `(target_header)=` + title = f"{rule.id}" + + if rule.help: + if not rule.help.startswith(f"# {rule.id}"): + raise RuntimeError( + f"Rule {rule.__class__} markdown help does not start with `# {rule.id}` header.\n{rule.help}" + ) + result += f"\n\n{rule.help}" + else: + description = rule.description + if rule.link: + description += f" [more]({rule.link})" + + result += f"\n\n## {title}\n\n**{rule.shortdesc}**\n\n{description}" + + return result + + +@group() +def rules_as_rich(rules: RulesCollection) -> Iterable[Table]: + """Print documentation for a list of rules, returns empty string.""" + width = max(16, *[len(rule.id) for rule in rules]) + for rule in rules.alphabetical(): + table = Table(show_header=True, header_style="title", box=box.MINIMAL) + table.add_column(rule.id, style="dim", width=width) + table.add_column(Markdown(rule.shortdesc)) + + description = rule.help or rule.description + if rule.link: + description += f" [(more)]({rule.link})" + table.add_row("description", Markdown(description)) + if rule.version_added: + table.add_row("version_added", rule.version_added) + if rule.tags: + table.add_row("tags", ", ".join(rule.tags)) + if rule.severity: + table.add_row("severity", rule.severity) + yield table + + +def profiles_as_md(header: bool = False, docs_url: str = RULE_DOC_URL) -> str: + """Return markdown representation of supported profiles.""" + result = "" + + if header: + 
result += """<!--- +Do not manually edit, generated from generate_docs.py +--> +# Profiles + +Ansible-lint profiles gradually increase the strictness of rules as your Ansible content lifecycle. + +!!! note + + Rules with `*` in the suffix are not yet implemented but are documented with linked GitHub issues. + +""" + + for name, profile in PROFILES.items(): + extends = "" + if profile.get("extends", None): + extends = ( + f" It extends [{profile['extends']}](#{profile['extends']}) profile." + ) + result += f"## {name}\n\n{profile['description']}{extends}\n" + for rule, rule_data in profile["rules"].items(): + if "[" in rule: + url = f"{docs_url}{rule.split('[')[0]}/" + else: + url = f"{docs_url}{rule}/" + if not rule_data: + result += f"- [{rule}]({url})\n" + else: + result += f"- [{rule}]({rule_data['url']})\n" + + result += "\n" + return result + + +def profiles_as_rich() -> Markdown: + """Return rich representation of supported profiles.""" + return Markdown(profiles_as_md()) diff --git a/src/ansiblelint/loaders.py b/src/ansiblelint/loaders.py new file mode 100644 index 0000000..6108701 --- /dev/null +++ b/src/ansiblelint/loaders.py @@ -0,0 +1,57 @@ +"""Utilities for loading various files.""" +from __future__ import annotations + +import logging +import os +from collections import defaultdict +from functools import partial +from pathlib import Path +from typing import Any + +import yaml +from yaml import YAMLError + +try: + from yaml import CFullLoader as FullLoader + from yaml import CSafeLoader as SafeLoader +except (ImportError, AttributeError): + from yaml import FullLoader, SafeLoader # type: ignore + +IGNORE_TXT = ".ansible-lint-ignore" +yaml_load = partial(yaml.load, Loader=FullLoader) +yaml_load_safe = partial(yaml.load, Loader=SafeLoader) +_logger = logging.getLogger(__name__) + + +def yaml_from_file(filepath: str | Path) -> Any: + """Return a loaded YAML file.""" + with open(str(filepath), encoding="utf-8") as content: + return yaml_load(content) + + +def load_ignore_txt(filepath: str | Path = IGNORE_TXT) -> dict[str, set[str]]: + """Return a list of rules to ignore.""" + result = defaultdict(set) + if os.path.isfile(filepath): + with open(str(filepath), encoding="utf-8") as content: + _logger.debug("Loading ignores from %s", filepath) + for line in content: + entry = line.split("#")[0].rstrip() + if entry: + try: + path, rule = entry.split() + except ValueError as exc: + raise RuntimeError( + f"Unable to parse line '{line}' from {filepath} file." 
+ ) from exc + result[path].add(rule) + return result + + +__all__ = [ + "load_ignore_txt", + "yaml_from_file", + "yaml_load", + "yaml_load_safe", + "YAMLError", +] diff --git a/src/ansiblelint/logger.py b/src/ansiblelint/logger.py new file mode 100644 index 0000000..18b36f4 --- /dev/null +++ b/src/ansiblelint/logger.py @@ -0,0 +1,18 @@ +"""Utils related to logging.""" +import logging +import time +from contextlib import contextmanager +from typing import Any, Iterator + +_logger = logging.getLogger(__name__) + + +@contextmanager +def timed_info(msg: Any, *args: Any) -> Iterator[None]: + """Context manager for logging slow operations, mentions duration.""" + start = time.time() + try: + yield + finally: + elapsed = time.time() - start + _logger.info(msg + " (%.2fs)", *(*args, elapsed)) diff --git a/src/ansiblelint/py.typed b/src/ansiblelint/py.typed new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/src/ansiblelint/py.typed diff --git a/src/ansiblelint/rules/__init__.py b/src/ansiblelint/rules/__init__.py new file mode 100644 index 0000000..5c87165 --- /dev/null +++ b/src/ansiblelint/rules/__init__.py @@ -0,0 +1,549 @@ +"""All internal ansible-lint rules.""" +from __future__ import annotations + +import copy +import inspect +import logging +import re +import sys +from argparse import Namespace +from collections import defaultdict +from functools import lru_cache +from importlib import import_module +from pathlib import Path +from typing import Any, Iterable, Iterator, MutableMapping, MutableSequence, cast + +from ruamel.yaml.comments import CommentedMap, CommentedSeq + +import ansiblelint.skip_utils +import ansiblelint.utils +import ansiblelint.yaml_utils +from ansiblelint._internal.rules import ( + AnsibleParserErrorRule, + BaseRule, + LoadingFailureRule, + RuntimeErrorRule, + WarningRule, +) +from ansiblelint.config import PROFILES, get_rule_config +from ansiblelint.config import options as default_options +from ansiblelint.constants import LINE_NUMBER_KEY, RULE_DOC_URL, SKIPPED_RULES_KEY +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable, expand_paths_vars + +_logger = logging.getLogger(__name__) + +match_types = { + "matchlines": "line", + "match": "line", # called by matchlines + "matchtasks": "task", + "matchtask": "task", # called by matchtasks + "matchyaml": "yaml", + "matchplay": "play", # called by matchyaml + "matchdir": "dir", +} + + +class AnsibleLintRule(BaseRule): + """AnsibleLintRule should be used as base for writing new rules.""" + + @property + def url(self) -> str: + """Return rule documentation url.""" + return RULE_DOC_URL + self.id + "/" + + @property + def rule_config(self) -> dict[str, Any]: + """Retrieve rule specific configuration.""" + return get_rule_config(self.id) + + @lru_cache(maxsize=256) + def get_config(self, key: str) -> Any: + """Return a configured value for given key string.""" + return self.rule_config.get(key, None) + + @staticmethod + def unjinja(text: str) -> str: + """Remove jinja2 bits from a string.""" + text = re.sub(r"{{.+?}}", "JINJA_EXPRESSION", text) + text = re.sub(r"{%.+?%}", "JINJA_STATEMENT", text) + text = re.sub(r"{#.+?#}", "JINJA_COMMENT", text) + return text + + # pylint: disable=too-many-arguments + def create_matcherror( + self, + message: str | None = None, + linenumber: int = 1, + details: str = "", + filename: Lintable | None = None, + tag: str = "", + ) -> MatchError: + """Instantiate a new MatchError.""" + match = MatchError( + message=message, + linenumber=linenumber, + 
details=details, + filename=filename, + rule=copy.copy(self), + ) + if tag: + match.tag = tag + # search through callers to find one of the match* methods + frame = inspect.currentframe() + match_type: str | None = None + while not match_type and frame is not None: + func_name = frame.f_code.co_name + match_type = match_types.get(func_name, None) + if match_type: + # add the match_type to the match + match.match_type = match_type + break + frame = frame.f_back # get the parent frame for the next iteration + return match + + @staticmethod + def _enrich_matcherror_with_task_details( + match: MatchError, task: dict[str, Any] + ) -> None: + match.task = task + if not match.details: + match.details = "Task/Handler: " + ansiblelint.utils.task_to_str(task) + if match.linenumber < task[LINE_NUMBER_KEY]: + match.linenumber = task[LINE_NUMBER_KEY] + + def matchlines(self, file: Lintable) -> list[MatchError]: + matches: list[MatchError] = [] + # arrays are 0-based, line numbers are 1-based + # so use prev_line_no as the counter + for prev_line_no, line in enumerate(file.content.split("\n")): + if line.lstrip().startswith("#"): + continue + + rule_id_list = ansiblelint.skip_utils.get_rule_skips_from_line(line) + if self.id in rule_id_list: + continue + + result = self.match(line) + if not result: + continue + message = None + if isinstance(result, str): + message = result + matcherror = self.create_matcherror( + message=message, + linenumber=prev_line_no + 1, + details=line, + filename=file, + ) + matches.append(matcherror) + return matches + + # pylint: disable=too-many-branches + def matchtasks(self, file: Lintable) -> list[MatchError]: # noqa: C901 + """Call matchtask for each task inside file and return aggregate results. + + Most rules will never need to override matchtasks because its main + purpose is to call matchtask for each task/handlers in the same file, + and to aggregate the results. 
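+
+        A rule normally overrides ``matchtask`` instead; a minimal, purely
+        illustrative sketch of such an override (the module name checked
+        below is only an example):
+
+        .. code:: python
+
+            def matchtask(self, task, file=None):
+                # Returning a string turns it into the match message.
+                if task["action"]["__ansible_module__"] == "shell":
+                    return "Found a shell task."
+                return False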
+ """ + matches: list[MatchError] = [] + if ( + file.kind not in ["handlers", "tasks", "playbook"] + or str(file.base_kind) != "text/yaml" + ): + return matches + + tasks_iterator = ansiblelint.yaml_utils.iter_tasks_in_file(file) + for raw_task, task, skipped_tags, error in tasks_iterator: + if error is not None: + # normalize_task converts AnsibleParserError to MatchError + return [error] + + if ( + self.id in skipped_tags + or ("action" not in task) + or "skip_ansible_lint" in task.get("tags", []) + ): + continue + + if self.needs_raw_task: + task["__raw_task__"] = raw_task + + result = self.matchtask(task, file=file) + if not result: + continue + + if isinstance(result, Iterable) and not isinstance( + result, str + ): # list[MatchError] + # https://github.com/PyCQA/pylint/issues/6044 + # pylint: disable=not-an-iterable + for match in result: + if match.tag in skipped_tags: + continue + self._enrich_matcherror_with_task_details(match, task) + matches.append(match) + continue + if isinstance(result, MatchError): + if result.tag in skipped_tags: + continue + match = result + else: # bool or string + message = None + if isinstance(result, str): + message = result + match = self.create_matcherror( + message=message, + linenumber=task[LINE_NUMBER_KEY], + filename=file, + ) + + self._enrich_matcherror_with_task_details(match, task) + matches.append(match) + return matches + + def matchyaml(self, file: Lintable) -> list[MatchError]: + matches: list[MatchError] = [] + if str(file.base_kind) != "text/yaml": + return matches + + yaml = file.data + # yaml returned can be an AnsibleUnicode (a string) when the yaml + # file contains a single string. YAML spec allows this but we consider + # this an fatal error. + if isinstance(yaml, str): + if yaml.startswith("$ANSIBLE_VAULT"): + return [] + return [MatchError(filename=file, rule=LoadingFailureRule())] + if not yaml: + return matches + + if isinstance(yaml, dict): + yaml = [yaml] + + for play in yaml: + # Bug #849 + if play is None: + continue + + if self.id in play.get(SKIPPED_RULES_KEY, ()): + continue + + if "skip_ansible_lint" in play.get("tags", []): + continue + + matches.extend(self.matchplay(file, play)) + + return matches + + +class TransformMixin: + """A mixin for AnsibleLintRule to enable transforming files. + + If ansible-lint is started with the ``--write`` option, then the ``Transformer`` + will call the ``transform()`` method for every MatchError identified if the rule + that identified it subclasses this ``TransformMixin``. Only the rule that identified + a MatchError can do transforms to fix that match. + """ + + def transform( + self, + match: MatchError, + lintable: Lintable, + data: CommentedMap | CommentedSeq | str, + ) -> None: + """Transform ``data`` to try to fix the MatchError identified by this rule. + + The ``match`` was generated by this rule in the ``lintable`` file. + When ``transform()`` is called on a rule, the rule should either fix the + issue, if possible, or make modifications that make it easier to fix manually. + + The transform must set ``match.fixed = True`` when data has been transformed to + fix the error. + + For YAML files, ``data`` is an editable YAML dict/array that preserves + any comments that were in the original file. + + .. code:: python + + data[0]["tasks"][0]["when"] = False + + This is easier with the ``seek()`` utility method: + + .. 
code :: python + + target_task = self.seek(match.yaml_path, data) + target_task["when"] = False + + For any files that aren't YAML, ``data`` is the loaded file's content as a string. + To edit non-YAML files, save the updated contents in ``lintable.content``: + + .. code:: python + + new_data = self.do_something_to_fix_the_match(data) + lintable.content = new_data + """ + + @staticmethod + def seek( + yaml_path: list[int | str], + data: MutableMapping[str, Any] | MutableSequence[Any] | str, + ) -> Any: + """Get the element identified by ``yaml_path`` in ``data``. + + Rules that work with YAML need to seek, or descend, into nested YAML data + structures to perform the relevant transforms. For example: + + .. code:: python + + def transform(self, match, lintable, data): + target_task = self.seek(match.yaml_path, data) + # transform target_task + """ + if isinstance(data, str): + # can't descend into a string + return data + target = data + for segment in yaml_path: + # The cast() calls tell mypy what types we expect. + # Essentially this does: + # target = target[segment] + if isinstance(segment, str): + target = cast(MutableMapping[str, Any], target)[segment] + elif isinstance(segment, int): + target = cast(MutableSequence[Any], target)[segment] + return target + + +# pylint: disable=too-many-nested-blocks +def load_plugins( # noqa: max-complexity: 11 + dirs: list[str], +) -> Iterator[AnsibleLintRule]: + """Yield a rule class.""" + + def all_subclasses(cls: type) -> set[type]: + return set(cls.__subclasses__()).union( + [s for c in cls.__subclasses__() for s in all_subclasses(c)] + ) + + orig_sys_path = sys.path.copy() + + for directory in dirs: + if directory not in sys.path: + sys.path.append(str(directory)) + + # load all modules in the directory + for f in Path(directory).glob("*.py"): + if "__" not in f.stem and f.stem not in "conftest": + import_module(f"{f.stem}") + # restore sys.path + sys.path = orig_sys_path + + rules: dict[str, BaseRule] = {} + for rule in all_subclasses(BaseRule): + # we do not return the rules that are not loaded from passed 'directory' + # or rules that do not have a valid id. For example, during testing + # python may load other rule classes, some outside the tested rule + # directories. + if getattr(rule, "id") and Path(inspect.getfile(rule)).parent.absolute() in [ + Path(x).absolute() for x in dirs + ]: + if issubclass(rule, BaseRule) and rule.id not in rules: + rules[rule.id] = rule() + for rule in rules.values(): # type: ignore + if isinstance(rule, AnsibleLintRule) and bool(rule.id): + yield rule + + +class RulesCollection: + """Container for a collection of rules.""" + + def __init__( + self, + rulesdirs: list[str] | None = None, + options: Namespace = default_options, + profile_name: str | None = None, + conditional: bool = True, + ) -> None: + """Initialize a RulesCollection instance.""" + self.options = options + self.profile = [] + if profile_name: + self.profile = PROFILES[profile_name] + if rulesdirs is None: + rulesdirs = [] + self.rulesdirs = expand_paths_vars(rulesdirs) + self.rules: list[BaseRule] = [] + # internal rules included in order to expose them for docs as they are + # not directly loaded by our rule loader. 
+ self.rules.extend( + [ + RuntimeErrorRule(), + AnsibleParserErrorRule(), + LoadingFailureRule(), + WarningRule(), + ] + ) + for rule in load_plugins(rulesdirs): + self.register(rule, conditional=conditional) + self.rules = sorted(self.rules) + + # When we have a profile we unload some of the rules + # But we do include all rules when listing all rules or tags + if profile_name and not (self.options.list_rules or self.options.list_tags): + filter_rules_with_profile(self.rules, profile_name) + + def register(self, obj: AnsibleLintRule, conditional: bool = False) -> None: + """Register a rule.""" + # We skip opt-in rules which were not manually enabled. + # But we do include opt-in rules when listing all rules or tags + if any( + [ + not conditional, + self.profile, # when profile is used we load all rules and filter later + "opt-in" not in obj.tags, + obj.id in self.options.enable_list, + self.options.list_rules, + self.options.list_tags, + ] + ): + obj._collection = self # pylint: disable=protected-access + self.rules.append(obj) + + def __iter__(self) -> Iterator[BaseRule]: + """Return the iterator over the rules in the RulesCollection.""" + return iter(sorted(self.rules)) + + def alphabetical(self) -> Iterator[BaseRule]: + """Return an iterator over the rules in the RulesCollection in alphabetical order.""" + return iter(sorted(self.rules, key=lambda x: x.id)) + + def __len__(self) -> int: + """Return the length of the RulesCollection data.""" + return len(self.rules) + + def extend(self, more: list[AnsibleLintRule]) -> None: + """Combine rules.""" + self.rules.extend(more) + + def run( # noqa: max-complexity: 12 + self, + file: Lintable, + tags: set[str] | None = None, + skip_list: list[str] | None = None, + ) -> list[MatchError]: + """Run all the rules against the given lintable.""" + matches: list[MatchError] = [] + if tags is None: + tags = set() + if skip_list is None: + skip_list = [] + + if not file.path.is_dir(): + try: + if file.content is not None: # loads the file content + pass + except (OSError, UnicodeDecodeError) as exc: + return [ + MatchError( + message=str(exc), + filename=file, + rule=LoadingFailureRule(), + tag=f"{LoadingFailureRule.id}[{exc.__class__.__name__.lower()}]", + ) + ] + + for rule in self.rules: + if rule.id == "syntax-check": + continue + if ( + not tags + or rule.has_dynamic_tags + or not set(rule.tags).union([rule.id]).isdisjoint(tags) + ): + rule_definition = set(rule.tags) + rule_definition.add(rule.id) + if set(rule_definition).isdisjoint(skip_list): + matches.extend(rule.getmatches(file)) + + # some rules can produce matches with tags that are inside our + # skip_list, so we need to cleanse the matches + matches = [m for m in matches if m.tag not in skip_list] + + return matches + + def __repr__(self) -> str: + """Return a RulesCollection instance representation.""" + return "\n".join( + [rule.verbose() for rule in sorted(self.rules, key=lambda x: x.id)] + ) + + def list_tags(self) -> str: + """Return a string with all the tags in the RulesCollection.""" + tag_desc = { + "command-shell": "Specific to use of command and shell modules", + "core": "Related to internal implementation of the linter", + "deprecations": "Indicate use of features that are removed from Ansible", + "experimental": "Newly introduced rules, by default triggering only warnings", + "formatting": "Related to code-style", + "idempotency": "Possible indication that consequent runs would produce different results", + "idiom": "Anti-pattern detected, likely to cause undesired 
behavior", + "metadata": "Invalid metadata, likely related to galaxy, collections or roles", + "opt-in": "Rules that are not used unless manually added to `enable_list`", + "security": "Rules related o potentially security issues, like exposing credentials", + "unpredictability": "Warn about code that might not work in a predictable way", + "unskippable": "Indicate a fatal error that cannot be ignored or disabled", + "yaml": "External linter which will also produce its own rule codes", + } + + tags = defaultdict(list) + for rule in self.rules: + for tag in rule.tags: + tags[tag].append(rule.id) + result = "# List of tags and rules they cover\n" + for tag in sorted(tags): + desc = tag_desc.get(tag, None) + if desc: + result += f"{tag}: # {desc}\n" + else: + result += f"{tag}:\n" + # result += f" rules:\n" + for name in tags[tag]: + result += f" - {name}\n" + return result + + +def filter_rules_with_profile(rule_col: list[BaseRule], profile: str) -> None: + """Unload rules that are not part of the specified profile.""" + included = set() + extends = profile + total_rules = len(rule_col) + while extends: + for rule in PROFILES[extends]["rules"]: + _logger.debug("Activating rule `%s` due to profile `%s`", rule, extends) + included.add(rule) + extends = PROFILES[extends].get("extends", None) + for rule in rule_col.copy(): + if rule.id not in included: + _logger.debug( + "Unloading %s rule due to not being part of %s profile.", + rule.id, + profile, + ) + rule_col.remove(rule) + else: + for tag in ("opt-in", "experimental"): + if tag in rule.tags: + _logger.debug( + "Removing tag `%s` from `%s` rule because `%s` profile makes it mandatory.", + tag, + rule.id, + profile, + ) + rule.tags.remove(tag) + # rule_col.rules.remove(rule) + # break + if "opt-in" in rule.tags: + rule.tags.remove("opt-in") + _logger.debug("%s/%s rules included in the profile", len(rule_col), total_rules) diff --git a/src/ansiblelint/rules/args.md b/src/ansiblelint/rules/args.md new file mode 100644 index 0000000..567d0fd --- /dev/null +++ b/src/ansiblelint/rules/args.md @@ -0,0 +1,91 @@ +# args + +This rule validates if the task arguments conform with the plugin documentation. + +The rule validation will check if the option name is valid and has the correct +value along with conditionals on the options like `mutually_exclusive`, +`required_together`, `required_one_of` and so on. + +For more information see the +[argument spec validator](https://docs.ansible.com/ansible/latest/reference_appendices/module_utils.html#argumentspecvalidator) +topic in the Ansible module utility documentation. + +Possible messages: + +- `args[module]` - missing required arguments: ... +- `args[module]` - missing parameter(s) required by ... + +## Problematic Code + +```yaml +--- +- name: Fixture to validate module options failure scenarios + hosts: localhost + tasks: + - name: Clone content repository + ansible.builtin.git: # <- Required option `repo` is missing. + dest: /home/www + accept_hostkey: true + version: master + update: false + + - name: Enable service httpd and ensure it is not masked + ansible.builtin.systemd: # <- Missing 'name' parameter required by 'enabled'. + enabled: true + masked: false + + - name: Use quiet to avoid verbose output + ansible.builtin.assert: + test: + - my_param <= 100 + - my_param >= 0 + quiet: invalid # <- Value for option `quiet` is invalid. 
+``` + +## Correct Code + +```yaml +--- +- name: Fixture to validate module options pass scenario + hosts: localhost + tasks: + - name: Clone content repository + ansible.builtin.git: # <- Contains required option `repo`. + repo: https://github.com/ansible/ansible-examples + dest: /home/www + accept_hostkey: true + version: master + update: false + + - name: Enable service httpd and ensure it is not masked + ansible.builtin.systemd: # <- Contains 'name' parameter required by 'enabled'. + name: httpd + enabled: false + masked: false + + - name: Use quiet to avoid verbose output + ansible.builtin.assert: + that: + - my_param <= 100 + - my_param >= 0 + quiet: True # <- Has correct type value for option `quiet` which is boolean. +``` + +## Special cases + +In some complex cases where you are using jinja expressions, the linter may not +able to fully validate all the possible values and report a false positive. The +example below would usually report +`parameters are mutually exclusive: data|file|keyserver|url` but because we +added `# noqa: args[module]` it will just pass. + +```yaml +- name: Add apt keys # noqa: args[module] + become: true + ansible.builtin.apt_key: + url: "{{ zj_item['url'] | default(omit) }}" + data: "{{ zj_item['data'] | default(omit) }}" + loop: "{{ repositories_keys }}" + loop_control: + loop_var: zj_item +``` diff --git a/src/ansiblelint/rules/args.py b/src/ansiblelint/rules/args.py new file mode 100644 index 0000000..0ff532e --- /dev/null +++ b/src/ansiblelint/rules/args.py @@ -0,0 +1,280 @@ +"""Rule definition to validate task options.""" +from __future__ import annotations + +import contextlib +import importlib.util +import io +import json +import logging +import re +import sys +from functools import lru_cache +from typing import Any + +# pylint: disable=preferred-module +from unittest import mock +from unittest.mock import patch + +# pylint: disable=reimported +import ansible.module_utils.basic as mock_ansible_module +from ansible.module_utils import basic +from ansible.plugins import loader + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule, RulesCollection +from ansiblelint.text import has_jinja +from ansiblelint.yaml_utils import clean_json + +_logger = logging.getLogger(__name__) + + +@lru_cache +def load_module(module_name: str) -> loader.PluginLoadContext: + """Load plugin from module name and cache it.""" + return loader.module_loader.find_plugin_with_context(module_name) + + +class ValidationPassed(Exception): + """Exception to be raised when validation passes.""" + + +class CustomAnsibleModule(basic.AnsibleModule): # type: ignore + """Mock AnsibleModule class.""" + + def __init__(self, *args: str, **kwargs: str) -> None: + """Initialize AnsibleModule mock.""" + super().__init__(*args, **kwargs) + raise ValidationPassed + + +class ArgsRule(AnsibleLintRule): + """Validating module arguments.""" + + id = "args" + severity = "HIGH" + description = "Check whether tasks are using correct module options." 
+ tags = ["syntax", "experimental"] + version_added = "v6.10.0" + module_aliases: dict[str, str] = {"block/always/rescue": "block/always/rescue"} + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + # pylint: disable=too-many-branches,too-many-locals + results: list[MatchError] = [] + module_name = task["action"]["__ansible_module_original__"] + failed_msg = None + + if module_name in self.module_aliases: + return [] + + loaded_module = load_module(module_name) + module_args = { + key: value + for key, value in task["action"].items() + if not key.startswith("__") + } + # https://github.com/ansible/ansible-lint/issues/2824 + if loaded_module.resolved_fqcn == "ansible.builtin.async_status": + module_args["_async_dir"] = "/tmp/ansible-async" + if loaded_module.resolved_fqcn == "ansible.builtin.service": + _logger.debug( + "Skipped service module validation as not being supported yet." + ) + return [] + + with mock.patch.object( + mock_ansible_module, "AnsibleModule", CustomAnsibleModule + ): + spec = importlib.util.spec_from_file_location( + name=loaded_module.resolved_fqcn, + location=loaded_module.plugin_resolved_path, + ) + if spec: + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + else: + assert file is not None + _logger.warning( + "Unable to load module %s at %s:%s for options validation", + module_name, + file.filename, + task[LINE_NUMBER_KEY], + ) + return [] + + try: + if not hasattr(module, "main"): + # skip validation for module options that are implemented as action plugin + # as the option values can be changed in action plugin and are not passed + # through `ArgumentSpecValidator` class as in case of modules. + return [] + + with patch.object( + sys, + "argv", + ["", json.dumps({"ANSIBLE_MODULE_ARGS": clean_json(module_args)})], + ): + fio = io.StringIO() + failed_msg = "" + # Warning: avoid running anything while stdout is redirected + # as what happens may be very hard to debug. + with contextlib.redirect_stdout(fio): + # pylint: disable=protected-access + basic._ANSIBLE_ARGS = None + try: + module.main() + except SystemExit: + failed_msg = fio.getvalue() + if failed_msg: + results.extend( + self._parse_failed_msg(failed_msg, task, module_name, file) + ) + + sanitized_results = self._sanitize_results(results, module_name) + return sanitized_results + except ValidationPassed: + return [] + + def _sanitize_results( + self, results: list[MatchError], module_name: str + ) -> list[MatchError]: + """Remove results that are false positive.""" + sanitized_results = [] + for result in results: + result_msg = result.message + if result_msg.startswith("Unsupported parameters"): + # cmd option is a special case in command module and after option validation is done. + if ( + "Unsupported parameters for (basic.py) module" in result_msg + and module_name + in ["command", "ansible.builtin.command", "ansible.legacy.command"] + ): + continue + result.message = result_msg.replace("(basic.py)", f"{module_name}") + elif result_msg.startswith("missing required arguments"): + if ( + "missing required arguments: free_form" in result_msg + and module_name + in [ + "raw", + "ansible.builtin.raw", + "ansible.legacy.raw", + "meta", + "ansible.builtin.meta", + "ansible.legacy.meta", + ] + ): + # free_form option is a special case in raw module hence ignore this error. 
+ continue + if ( + "missing required arguments: key_value" in result_msg + and module_name + in [ + "set_fact", + "ansible.builtin.set_fact", + "ansible.legacy.set_fact", + ] + ): + # handle special case for set_fact module with key and value + continue + if "Supported parameters include" in result_msg and module_name in [ + "set_fact", + "ansible.builtin.set_fact", + "ansible.legacy.set_fact", + ]: + continue + sanitized_results.append(result) + + return sanitized_results + + def _parse_failed_msg( + self, + failed_msg: str, + task: dict[str, Any], + module_name: str, + file: Lintable | None = None, + ) -> list[MatchError]: + """Parse failed message and return list of MatchError.""" + results: list[MatchError] = [] + try: + failed_obj = json.loads(failed_msg) + error_message = failed_obj["msg"] + except json.decoder.JSONDecodeError: + error_message = failed_msg + + option_type_check_error = re.search( + r"argument '(?P<name>.*)' is of type", error_message + ) + if option_type_check_error: + # ignore options with templated variable value with type check errors + option_key = option_type_check_error.group("name") + option_value = task["action"][option_key] + if has_jinja(option_value): + _logger.debug( + "Type checking ignored for '%s' option in task '%s' at line %s.", + option_key, + module_name, + task[LINE_NUMBER_KEY], + ) + return results + + value_not_in_choices_error = re.search( + r"value of (?P<name>.*) must be one of:", error_message + ) + if value_not_in_choices_error: + # ignore templated value not in allowed choices + choice_key = value_not_in_choices_error.group("name") + choice_value = task["action"][choice_key] + if has_jinja(choice_value): + _logger.debug( + "Value checking ignored for '%s' option in task '%s' at line %s.", + choice_key, + module_name, + task[LINE_NUMBER_KEY], + ) + return results + + results.append( + self.create_matcherror( + message=error_message, + linenumber=task[LINE_NUMBER_KEY], + tag="args[module]", + filename=file, + ) + ) + return results + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + def test_args_module_fail() -> None: + """Test rule invalid module options.""" + collection = RulesCollection() + collection.register(ArgsRule()) + success = "examples/playbooks/rule-args-module-fail-1.yml" + results = Runner(success, rules=collection).run() + assert len(results) == 5 + assert results[0].tag == "args[module]" + assert "missing required arguments" in results[0].message + assert results[1].tag == "args[module]" + assert "missing parameter(s) required by " in results[1].message + assert results[2].tag == "args[module]" + assert "Unsupported parameters for" in results[2].message + assert results[3].tag == "args[module]" + assert "Unsupported parameters for" in results[2].message + assert results[4].tag == "args[module]" + assert "value of state must be one of" in results[4].message + + def test_args_module_pass() -> None: + """Test rule valid module options.""" + collection = RulesCollection() + collection.register(ArgsRule()) + success = "examples/playbooks/rule-args-module-pass-1.yml" + results = Runner(success, rules=collection).run() + assert len(results) == 0, results diff --git a/src/ansiblelint/rules/avoid_implicit.md b/src/ansiblelint/rules/avoid_implicit.md new file mode 100644 index 0000000..4c3d781 --- /dev/null +++ b/src/ansiblelint/rules/avoid_implicit.md @@ -0,0 +1,37 @@ +# avoid-implicit + +This 
rule identifies the use of dangerous implicit behaviors, often also +undocumented. + +This rule will produce the following type of error messages: + +- `avoid-implicit[copy-content]` is not a string as [copy](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/copy_module.html#synopsis) + modules also accept these, but without documenting them. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Write file content + ansible.builtin.copy: + content: { "foo": "bar" } # <-- should use explicit jinja template + dest: /tmp/foo.txt +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Write file content + vars: + content: { "foo": "bar" } + ansible.builtin.copy: + content: "{{ content | to_json }}" # explicit better than implicit! + dest: /tmp/foo.txt +``` diff --git a/src/ansiblelint/rules/avoid_implicit.py b/src/ansiblelint/rules/avoid_implicit.py new file mode 100644 index 0000000..dbd44d6 --- /dev/null +++ b/src/ansiblelint/rules/avoid_implicit.py @@ -0,0 +1,58 @@ +"""Implementation of avoid-implicit rule.""" +# https://github.com/ansible/ansible-lint/issues/2501 +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class AvoidImplicitRule(AnsibleLintRule): + """Rule that identifies use of undocumented or discouraged implicit behaviors.""" + + id = "avoid-implicit" + shortdesc = "Avoid implicit behaviors" + description = ( + "Items which are templated should use ``template`` instead of " + "``copy`` with ``content`` to ensure correctness." + ) + severity = "MEDIUM" + tags = ["unpredictability"] + version_added = "v6.8.0" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + """Confirm if current rule is matching a specific task.""" + if task["action"]["__ansible_module__"] == "copy": + content = task["action"].get("content", "") + if not isinstance(content, str): + return True + return False + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + def test_template_instead_of_copy_positive() -> None: + """Positive test for avoid-implicit.""" + collection = RulesCollection() + collection.register(AvoidImplicitRule()) + success = "examples/playbooks/rule-avoid-implicit-pass.yml" + good_runner = Runner(success, rules=collection) + assert [] == good_runner.run() + + def test_template_instead_of_copy_negative() -> None: + """Negative test for avoid-implicit.""" + collection = RulesCollection() + collection.register(AvoidImplicitRule()) + failure = "examples/playbooks/rule-avoid-implicit-fail.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 1 diff --git a/src/ansiblelint/rules/command_instead_of_module.md b/src/ansiblelint/rules/command_instead_of_module.md new file mode 100644 index 0000000..a4e69b0 --- /dev/null +++ b/src/ansiblelint/rules/command_instead_of_module.md @@ -0,0 +1,35 @@ +# command-instead-of-module + +This rule will recommend you to use a specific ansible module instead for tasks +that are better served by a module, as these are more reliable, provide better +messaging and usually have additional 
features like the ability to retry.
+
+In the unlikely case that the rule triggers false positives, you can disable it
+by adding a comment like `# noqa: command-instead-of-module` to the same line.
+
+You can check the [source](https://github.com/ansible/ansible-lint/blob/main/src/ansiblelint/rules/command_instead_of_module.py)
+of the rule for all the known commands that trigger it and the arguments that
+are allowed as exceptions, and raise a pull request to improve them.
+
+## Problematic Code
+
+```yaml
+---
+- name: Update apt cache
+  hosts: all
+  tasks:
+    - name: Run apt-get update
+      ansible.builtin.command: apt-get update # <-- better to use ansible.builtin.apt module
+```
+
+## Correct Code
+
+```yaml
+---
+- name: Update apt cache
+  hosts: all
+  tasks:
+    - name: Run apt-get update
+      ansible.builtin.apt:
+        update_cache: true
+```
diff --git a/src/ansiblelint/rules/command_instead_of_module.py b/src/ansiblelint/rules/command_instead_of_module.py
new file mode 100644
index 0000000..b55775f
--- /dev/null
+++ b/src/ansiblelint/rules/command_instead_of_module.py
@@ -0,0 +1,262 @@
+"""Implementation of command-instead-of-module rule."""
+# Copyright (c) 2013-2014 Will Thames <will@thames.id.au>
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
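+
+# For example, `command: git branch` is not reported because "branch" is
+# listed in _executable_options["git"] below, while `command: git clone ...`
+# is reported with a suggestion to use the git module instead.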
+from __future__ import annotations + +import os +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.utils import convert_to_boolean, get_first_cmd_arg, get_second_cmd_arg + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class CommandsInsteadOfModulesRule(AnsibleLintRule): + """Using command rather than module.""" + + id = "command-instead-of-module" + description = ( + "Executing a command when there is an Ansible module is generally a bad idea" + ) + severity = "HIGH" + tags = ["command-shell", "idiom"] + version_added = "historic" + + _commands = ["command", "shell"] + _modules = { + "apt-get": "apt-get", + "chkconfig": "service", + "curl": "get_url or uri", + "git": "git", + "hg": "hg", + "letsencrypt": "acme_certificate", + "mktemp": "tempfile", + "mount": "mount", + "patch": "patch", + "rpm": "yum or rpm_key", + "rsync": "synchronize", + "sed": "template, replace or lineinfile", + "service": "service", + "supervisorctl": "supervisorctl", + "svn": "subversion", + "systemctl": "systemd", + "tar": "unarchive", + "unzip": "unarchive", + "wget": "get_url or uri", + "yum": "yum", + } + + _executable_options = { + "git": ["branch", "log", "lfs"], + "systemctl": ["--version", "kill", "set-default", "show-environment", "status"], + "yum": ["clean"], + "rpm": ["--nodeps"], + } + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if task["action"]["__ansible_module__"] not in self._commands: + return False + + first_cmd_arg = get_first_cmd_arg(task) + second_cmd_arg = get_second_cmd_arg(task) + + if not first_cmd_arg: + return False + + executable = os.path.basename(first_cmd_arg) + + if ( + second_cmd_arg + and executable in self._executable_options + and second_cmd_arg in self._executable_options[executable] + ): + return False + + if executable in self._modules and convert_to_boolean( + task["action"].get("warn", True) + ): + message = "{0} used in place of {1} module" + return message.format(executable, self._modules[executable]) + return False + + +if "pytest" in sys.modules: # noqa: C901 + import pytest + + from ansiblelint.testing import RunFromText # pylint: disable=ungrouped-imports + + APT_GET = """ +- hosts: all + tasks: + - name: Run apt-get update + command: apt-get update +""" + + GIT_COMMANDS_OK = """ +- hosts: all + tasks: + - name: Print current git branch + command: git branch + - name: Print git log + command: git log + - name: Install git lfs support + command: git lfs install +""" + + RESTART_SSHD = """ +- hosts: all + tasks: + - name: Restart sshd + command: systemctl restart sshd +""" + + SYSTEMCTL_STATUS = """ +- hosts: all + tasks: + - name: Show systemctl service status + command: systemctl status systemd-timesyncd +""" + + SYSTEMD_ENVIRONMENT = """ +- hosts: all + tasks: + - name: Show systemd environment + command: systemctl show-environment +""" + + SYSTEMD_RUNLEVEL = """ +- hosts: all + tasks: + - name: Set systemd runlevel + command: systemctl set-default multi-user.target +""" + + SYSTEMD_KILL = """ +- hosts: all + tasks: + - name: Kill service using SIGUSR1 + command: systemctl kill --signal=SIGUSR1 sshd +""" + + YUM_UPDATE = """ +- hosts: all + tasks: + - name: Run yum update + command: yum update +""" + + YUM_CLEAN = """ +- hosts: all + tasks: + - name: Clear yum cache + command: yum clean all +""" + + NO_COMMAND = """ +- hosts: all + tasks: + - name: Clear yum cache + command: "" +""" + + @pytest.mark.parametrize( + 
"rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_apt_get(rule_runner: RunFromText) -> None: + """The apt module supports update.""" + results = rule_runner.run_playbook(APT_GET) + assert len(results) == 1 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_restart_sshd(rule_runner: RunFromText) -> None: + """Restarting services is supported by the systemd module.""" + results = rule_runner.run_playbook(RESTART_SSHD) + assert len(results) == 1 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_git_commands_ok(rule_runner: RunFromText) -> None: + """Check the git commands not supported by the git module do not trigger rule.""" + results = rule_runner.run_playbook(GIT_COMMANDS_OK) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_systemd_status(rule_runner: RunFromText) -> None: + """Set-default is not supported by the systemd module.""" + results = rule_runner.run_playbook(SYSTEMCTL_STATUS) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_systemd_environment(rule_runner: RunFromText) -> None: + """Showing the environment is not supported by the systemd module.""" + results = rule_runner.run_playbook(SYSTEMD_ENVIRONMENT) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_systemd_runlevel(rule_runner: RunFromText) -> None: + """Set-default is not supported by the systemd module.""" + results = rule_runner.run_playbook(SYSTEMD_RUNLEVEL) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_systemd_kill(rule_runner: RunFromText) -> None: + """Kill is not supported by the systemd module.""" + results = rule_runner.run_playbook(SYSTEMD_KILL) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_yum_update(rule_runner: RunFromText) -> None: + """Using yum update should fail.""" + results = rule_runner.run_playbook(YUM_UPDATE) + assert len(results) == 1 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_yum_clean(rule_runner: RunFromText) -> None: + """The yum module does not support clearing yum cache.""" + results = rule_runner.run_playbook(YUM_CLEAN) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_no_command(rule_runner: RunFromText) -> None: + """If no command is passed it should return 0.""" + results = rule_runner.run_playbook(NO_COMMAND) + assert len(results) == 0 diff --git a/src/ansiblelint/rules/command_instead_of_shell.md b/src/ansiblelint/rules/command_instead_of_shell.md new file mode 100644 index 0000000..0abf69d --- /dev/null +++ b/src/ansiblelint/rules/command_instead_of_shell.md @@ -0,0 +1,30 @@ +# command-instead-of-shell + +This rule identifies uses of `shell` modules instead of a `command` one when +this is not really needed. 
Shell is considerably slower than command and should +be avoided unless there is a special need for using shell features, like +environment variable expansion or chaining multiple commands using pipes. + +## Problematic Code + +```yaml +--- +- name: Problematic example + hosts: localhost + tasks: + - name: Echo a message + ansible.builtin.shell: echo hello # <-- command is better in this case + changed_when: false +``` + +## Correct Code + +```yaml +--- +- name: Correct example + hosts: localhost + tasks: + - name: Echo a message + ansible.builtin.command: echo hello + changed_when: false +``` diff --git a/src/ansiblelint/rules/command_instead_of_shell.py b/src/ansiblelint/rules/command_instead_of_shell.py new file mode 100644 index 0000000..d1cb5b1 --- /dev/null +++ b/src/ansiblelint/rules/command_instead_of_shell.py @@ -0,0 +1,177 @@ +"""Implementation of command-instead-of-shell rule.""" +# Copyright (c) 2016 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +FAIL_PLAY = """--- +- name: Fixture + hosts: localhost + tasks: + - name: Shell no pipe + ansible.builtin.shell: + cmd: echo hello + changed_when: false + + - name: Shell with jinja filter + ansible.builtin.shell: + cmd: echo {{ "hello" | upper }} + changed_when: false + + - name: Shell with jinja filter (fqcn) + ansible.builtin.shell: + cmd: echo {{ "hello" | upper }} + changed_when: false + + - name: Command with executable parameter + ansible.builtin.shell: + cmd: clear + args: + executable: /bin/bash + changed_when: false +""" + +SUCCESS_PLAY = """--- +- name: Fixture + hosts: localhost + tasks: + - name: Shell with pipe + ansible.builtin.shell: + cmd: echo hello | true # noqa: risky-shell-pipe + changed_when: false + + - name: Shell with redirect + ansible.builtin.shell: + cmd: echo hello > /tmp/hello + changed_when: false + + - name: Chain two shell commands + ansible.builtin.shell: + cmd: echo hello && echo goodbye + changed_when: false + + - name: Run commands in succession + ansible.builtin.shell: + cmd: echo hello ; echo goodbye + changed_when: false + + - name: Use variables + ansible.builtin.shell: + cmd: echo $HOME $USER + changed_when: false + + - name: Use * for globbing + ansible.builtin.shell: + cmd: ls foo* + changed_when: false + + - name: Use ? 
for globbing + ansible.builtin.shell: + cmd: ls foo? + changed_when: false + + - name: Use [] for globbing + ansible.builtin.shell: + cmd: ls foo[1,2,3] + changed_when: false + + - name: Use shell generator + ansible.builtin.shell: + cmd: ls foo{.txt,.xml} + changed_when: false + + - name: Use backticks + ansible.builtin.shell: + cmd: ls `ls foo*` + changed_when: false + + - name: Use shell with cmd + ansible.builtin.shell: + cmd: | + set -x + ls foo? + changed_when: false +""" + + +class UseCommandInsteadOfShellRule(AnsibleLintRule): + """Use shell only when shell functionality is required.""" + + id = "command-instead-of-shell" + description = ( + "Shell should only be used when piping, redirecting " + "or chaining commands (and Ansible would be preferred " + "for some of those!)" + ) + severity = "HIGH" + tags = ["command-shell", "idiom"] + version_added = "historic" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + # Use unjinja so that we don't match on jinja filters + # rather than pipes + if task["action"]["__ansible_module__"] in ["shell", "ansible.builtin.shell"]: + # Since Ansible 2.4, the `command` module does not accept setting + # the `executable`. If the user needs to set it, they have to use + # the `shell` module. + if "executable" in task["action"]: + return False + + if "cmd" in task["action"]: + jinja_stripped_cmd = self.unjinja(task["action"].get("cmd", [])) + else: + jinja_stripped_cmd = self.unjinja( + " ".join(task["action"].get("__ansible_arguments__", [])) + ) + return not any(ch in jinja_stripped_cmd for ch in "&|<>;$\n*[]{}?`") + return False + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.testing import RunFromText # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("text", "expected"), + ( + pytest.param(SUCCESS_PLAY, 0, id="good"), + pytest.param(FAIL_PLAY, 3, id="bad"), + ), + ) + def test_rule_command_instead_of_shell( + default_text_runner: RunFromText, text: str, expected: int + ) -> None: + """Validate that rule works as intended.""" + results = default_text_runner.run_playbook(text) + for result in results: + assert result.rule.id == UseCommandInsteadOfShellRule.id, result + assert len(results) == expected diff --git a/src/ansiblelint/rules/conftest.py b/src/ansiblelint/rules/conftest.py new file mode 100644 index 0000000..f4df7a5 --- /dev/null +++ b/src/ansiblelint/rules/conftest.py @@ -0,0 +1,3 @@ +"""Makes pytest fixtures available.""" +# pylint: disable=wildcard-import,unused-wildcard-import +from ansiblelint.testing.fixtures import * # noqa: F403 diff --git a/src/ansiblelint/rules/custom/__init__.py b/src/ansiblelint/rules/custom/__init__.py new file mode 100644 index 0000000..8c3e048 --- /dev/null +++ b/src/ansiblelint/rules/custom/__init__.py @@ -0,0 +1 @@ +"""A placeholder package for putting custom rules under this dir.""" diff --git a/src/ansiblelint/rules/deprecated_bare_vars.md b/src/ansiblelint/rules/deprecated_bare_vars.md new file mode 100644 index 0000000..9e2f15b --- /dev/null +++ b/src/ansiblelint/rules/deprecated_bare_vars.md @@ -0,0 +1,32 @@ +# deprecated-bare-vars + +This rule identifies possible confusing expressions where it is not clear if +a variable or string is to be used and asks for clarification. + +You should either use the full variable syntax ('{{{{ {0} }}}}') or, whenever +possible, convert it to a list of strings. 
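+
+The rule implementation further below makes an exception for file-pattern
+loops: a bare glob pattern is accepted for `with_fileglob`, and a bare path
+ending with a path separator is accepted for `with_filetree`. A small
+illustrative sketch (the paths are made up):
+
+```yaml
+# not flagged: with_fileglob accepts a bare glob pattern
+- ansible.builtin.debug:
+    msg: "{{ item }}"
+  with_fileglob:
+    - /etc/conf.d/*.conf
+```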
+ +## Problematic code + +```yaml +--- +- ansible.builtin.debug: + msg: "{{ item }}" + with_items: foo # <-- deprecated-bare-vars +``` + +## Correct code + +```yaml +--- +# if foo is not really a variable: +- ansible.builtin.debug: + msg: "{{ item }}" + with_items: + - foo + +# if foo is a variable: +- ansible.builtin.debug: + msg: "{{ item }}" + with_items: "{{ foo }}" +``` diff --git a/src/ansiblelint/rules/deprecated_bare_vars.py b/src/ansiblelint/rules/deprecated_bare_vars.py new file mode 100644 index 0000000..40912f8 --- /dev/null +++ b/src/ansiblelint/rules/deprecated_bare_vars.py @@ -0,0 +1,117 @@ +"""Implementation of deprecated-bare-vars rule.""" + +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +from __future__ import annotations + +import os +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.text import has_glob, has_jinja + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class UsingBareVariablesIsDeprecatedRule(AnsibleLintRule): + """Using bare variables is deprecated.""" + + id = "deprecated-bare-vars" + description = ( + "Using bare variables is deprecated. Update your " + "playbooks so that the environment value uses the full variable " + "syntax ``{{ your_variable }}``" + ) + severity = "VERY_HIGH" + tags = ["deprecations"] + version_added = "historic" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + loop_type = next((key for key in task if key.startswith("with_")), None) + if loop_type: + if loop_type in [ + "with_nested", + "with_together", + "with_flattened", + "with_filetree", + "with_community.general.filetree", + ]: + # These loops can either take a list defined directly in the task + # or a variable that is a list itself. When a single variable is used + # we just need to check that one variable, and not iterate over it like + # it's a list. Otherwise, loop through and check all items. 
+ items = task[loop_type] + if not isinstance(items, (list, tuple)): + items = [items] + for var in items: + return self._matchvar(var, task, loop_type) + elif loop_type == "with_subelements": + return self._matchvar(task[loop_type][0], task, loop_type) + elif loop_type in ["with_sequence", "with_ini", "with_inventory_hostnames"]: + pass + else: + return self._matchvar(task[loop_type], task, loop_type) + return False + + def _matchvar( + self, varstring: str, task: dict[str, Any], loop_type: str + ) -> bool | str: + if isinstance(varstring, str) and not has_jinja(varstring): + valid = loop_type == "with_fileglob" and bool( + has_jinja(varstring) or has_glob(varstring) + ) + + valid |= loop_type == "with_filetree" and bool( + has_jinja(varstring) or varstring.endswith(os.sep) + ) + if not valid: + message = ( + "Possible bare variable '{0}' used in a '{1}' loop." + + " You should use the full variable syntax ('{{{{ {0} }}}}') or convert it to a list if that is not really a variable." + ) + return message.format(task[loop_type], loop_type) + return False + + +if "pytest" in sys.modules: + from ansiblelint.rules import RulesCollection + from ansiblelint.runner import Runner + + def test_use_bare_positive() -> None: + """Positive test for deprecated-bare-vars.""" + collection = RulesCollection() + collection.register(UsingBareVariablesIsDeprecatedRule()) + success = "examples/playbooks/rule-deprecated-bare-vars-pass.yml" + good_runner = Runner(success, rules=collection) + assert [] == good_runner.run() + + def test_use_bare_negative() -> None: + """Negative test for deprecated-bare-vars.""" + collection = RulesCollection() + collection.register(UsingBareVariablesIsDeprecatedRule()) + failure = "examples/playbooks/rule-deprecated-bare-vars-fail.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 12 diff --git a/src/ansiblelint/rules/deprecated_command_syntax.md b/src/ansiblelint/rules/deprecated_command_syntax.md new file mode 100644 index 0000000..f99ca99 --- /dev/null +++ b/src/ansiblelint/rules/deprecated_command_syntax.md @@ -0,0 +1,32 @@ +# deprecated-command-syntax + +This rule identifies the use of shorthand (free-form) syntax as this is highly +discouraged inside playbooks, mainly because it can easily lead to bugs that +are hard to identify. + +While using the free-form from the command line is ok, it should never be used +inside playbooks. 
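+
+As the rule implementation further below shows, the check is keyed on commands
+that duplicate arguments of the `ansible.builtin.file` module, such as `chown`,
+`chmod`, `mkdir` or `rm`. A hypothetical example of a task that would be
+reported, together with its preferred replacement:
+
+```yaml
+# reported: mkdir duplicates the file module's state=directory argument
+- name: Create a cache directory
+  ansible.builtin.command: mkdir -p /var/cache/app
+
+# preferred: use the file module instead
+- name: Create a cache directory
+  ansible.builtin.file:
+    path: /var/cache/app
+    state: directory
+```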
+ +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Perform chmod + ansible.builtin.command: creates=B chmod 644 A # <-- do not use shorthand +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Perform chmod + ansible.builtin.command: chmod 644 A + args: + creates: B +``` diff --git a/src/ansiblelint/rules/deprecated_command_syntax.py b/src/ansiblelint/rules/deprecated_command_syntax.py new file mode 100644 index 0000000..53ed94f --- /dev/null +++ b/src/ansiblelint/rules/deprecated_command_syntax.py @@ -0,0 +1,101 @@ +"""Implementation of deprecated-command-syntax rule.""" +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +from __future__ import annotations + +import os +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.utils import convert_to_boolean, get_first_cmd_arg + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class CommandsInsteadOfArgumentsRule(AnsibleLintRule): + """Using command rather than an argument to e.g. 
file.""" + + id = "deprecated-command-syntax" + description = ( + "Executing a command when there are arguments to modules " + "is generally a bad idea" + ) + severity = "VERY_HIGH" + tags = ["command-shell", "deprecations"] + version_added = "historic" + + _commands = ["command", "shell", "raw"] + _arguments = { + "chown": "owner", + "chmod": "mode", + "chgrp": "group", + "ln": "state=link", + "mkdir": "state=directory", + "rmdir": "state=absent", + "rm": "state=absent", + } + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if task["action"]["__ansible_module__"] in self._commands: + first_cmd_arg = get_first_cmd_arg(task) + if not first_cmd_arg: + return False + + executable = os.path.basename(first_cmd_arg) + if executable in self._arguments and convert_to_boolean( + task["action"].get("warn", True) + ): + message = "{0} used in place of argument {1} to file module" + return message.format(executable, self._arguments[executable]) + return False + + +DEPRECATED_COMMAND_PLAY = """--- +- name: Fixture + hosts: localhost + tasks: + - name: Shell with pipe + ansible.builtin.command: + err: echo hello | true # noqa: risky-shell-pipe + changed_when: false +""" + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.testing import RunFromText # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("text", "expected"), + (pytest.param(DEPRECATED_COMMAND_PLAY, 0, id="no_first_cmd_arg"),), + ) + def test_rule_deprecated_command_no_first_cmd_arg( + default_text_runner: RunFromText, text: str, expected: int + ) -> None: + """Validate that rule works as intended.""" + results = default_text_runner.run_playbook(text) + assert len(results) == expected diff --git a/src/ansiblelint/rules/deprecated_local_action.md b/src/ansiblelint/rules/deprecated_local_action.md new file mode 100644 index 0000000..d5e8361 --- /dev/null +++ b/src/ansiblelint/rules/deprecated_local_action.md @@ -0,0 +1,21 @@ +# deprecated-local-action + +This rule recommends using `delegate_to: localhost` instead of the +`local_action`. + +## Problematic Code + +```yaml +--- +- name: Task example + local_action: # <-- this is deprecated + module: boto3_facts +``` + +## Correct Code + +```yaml +- name: Task example + boto3_facts: + delegate_to: localhost # <-- recommended way to run on localhost +``` diff --git a/src/ansiblelint/rules/deprecated_local_action.py b/src/ansiblelint/rules/deprecated_local_action.py new file mode 100644 index 0000000..c77e208 --- /dev/null +++ b/src/ansiblelint/rules/deprecated_local_action.py @@ -0,0 +1,62 @@ +"""Implementation for deprecated-local-action rule.""" +# Copyright (c) 2016, Tsukinowa Inc. 
<info@tsukinowa.jp> +# Copyright (c) 2018, Ansible Project +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class TaskNoLocalAction(AnsibleLintRule): + """Do not use 'local_action', use 'delegate_to: localhost'.""" + + id = "deprecated-local-action" + description = "Do not use ``local_action``, use ``delegate_to: localhost``" + needs_raw_task = True + severity = "MEDIUM" + tags = ["deprecations"] + version_added = "v4.0.0" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + """Return matches for a task.""" + raw_task = task["__raw_task__"] + if "local_action" in raw_task.keys(): + return True + + return False + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.testing import RunFromText + + FAIL_TASK = """ + - name: Task example + local_action: + module: boto3_facts + """ + + SUCCESS_TASK = """ + - name: Task example + boto3_facts: + delegate_to: localhost # local_action + """ + + @pytest.mark.parametrize(("text", "expected"), ((SUCCESS_TASK, 0), (FAIL_TASK, 1))) + def test_local_action(text: str, expected: int) -> None: + """Positive test deprecated_local_action.""" + collection = RulesCollection() + collection.register(TaskNoLocalAction()) + runner = RunFromText(collection) + results = runner.run_role_tasks_main(text) + assert len(results) == expected diff --git a/src/ansiblelint/rules/deprecated_module.md b/src/ansiblelint/rules/deprecated_module.md new file mode 100644 index 0000000..c05d641 --- /dev/null +++ b/src/ansiblelint/rules/deprecated_module.md @@ -0,0 +1,32 @@ +# deprecated-module + +This rule identifies deprecated modules in playbooks. +You should avoid using deprecated modules because they are not maintained, which can pose a security risk. +Additionally when a module is deprecated it is available temporarily with a plan for future removal. + +Refer to the [Ansible module index](https://docs.ansible.com/ansible/latest/collections/index_module.html) for information about replacements and removal dates for deprecated modules. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Configure VLAN ID + ansible.netcommon.net_vlan: # <- Uses a deprecated module. + vlan_id: 20 +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Configure VLAN ID + dellemc.enterprise_sonic.sonic_vlans: # <- Uses a platform specific module. + config: + - vlan_id: 20 +``` diff --git a/src/ansiblelint/rules/deprecated_module.py b/src/ansiblelint/rules/deprecated_module.py new file mode 100644 index 0000000..00a4523 --- /dev/null +++ b/src/ansiblelint/rules/deprecated_module.py @@ -0,0 +1,78 @@ +"""Implementation of deprecated-module rule.""" +# Copyright (c) 2018, Ansible Project + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class DeprecatedModuleRule(AnsibleLintRule): + """Deprecated module.""" + + id = "deprecated-module" + description = ( + "These are deprecated modules, some modules are kept " + "temporarily for backwards compatibility but usage is discouraged." 
+ ) + link = "https://docs.ansible.com/ansible/latest/collections/index_module.html" + severity = "HIGH" + tags = ["deprecations"] + version_added = "v4.0.0" + + _modules = [ + # spell-checker:disable + "accelerate", + "aos_asn_pool", + "aos_blueprint", + "aos_blueprint_param", + "aos_blueprint_virtnet", + "aos_device", + "aos_external_router", + "aos_ip_pool", + "aos_logical_device", + "aos_logical_device_map", + "aos_login", + "aos_rack_type", + "aos_template", + "azure", + "cl_bond", + "cl_bridge", + "cl_img_install", + "cl_interface", + "cl_interface_policy", + "cl_license", + "cl_ports", + "cs_nic", + "docker", + "ec2_ami_find", + "ec2_ami_search", + "ec2_remote_facts", + "ec2_vpc", + "kubernetes", + "netscaler", + "nxos_ip_interface", + "nxos_mtu", + "nxos_portchannel", + "nxos_switchport", + "oc", + "panos_nat_policy", + "panos_security_policy", + "vsphere_guest", + "win_msi", + "include", + # spell-checker:enable + ] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + module = task["action"]["__ansible_module__"] + if module in self._modules: + message = "{0} {1}" + return message.format(self.shortdesc, module) + return False diff --git a/src/ansiblelint/rules/empty_string_compare.md b/src/ansiblelint/rules/empty_string_compare.md new file mode 100644 index 0000000..c20bc51 --- /dev/null +++ b/src/ansiblelint/rules/empty_string_compare.md @@ -0,0 +1,44 @@ +# empty-string-compare + +This rule checks for empty string comparison in playbooks. +To ensure code clarity you should avoid using empty strings in conditional statements with the `when` clause. + +- Use `when: var | length > 0` instead of `when: var != ""`. +- Use `when: var | length == 0` instead of `when: var == ""`. + +This is an opt-in rule. +You must enable it in your Ansible-lint configuration as follows: + +```yaml +enable_list: + - empty-string-compare +``` + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Shut down + ansible.builtin.command: /sbin/shutdown -t now + when: ansible_os_family == "" # <- Compares with an empty string. + - name: Shut down + ansible.builtin.command: /sbin/shutdown -t now + when: ansible_os_family !="" # <- Compares with an empty string. 
+``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Shut down + ansible.builtin.shell: | + /sbin/shutdown -t now + echo $var == + when: ansible_os_family +``` diff --git a/src/ansiblelint/rules/empty_string_compare.py b/src/ansiblelint/rules/empty_string_compare.py new file mode 100644 index 0000000..ae42ceb --- /dev/null +++ b/src/ansiblelint/rules/empty_string_compare.py @@ -0,0 +1,103 @@ +"""Implementation of empty-string-compare rule.""" +# Copyright (c) 2016, Will Thames and contributors +# Copyright (c) 2018, Ansible Project + +from __future__ import annotations + +import re +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.yaml_utils import nested_items_path + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class ComparisonToEmptyStringRule(AnsibleLintRule): + """Don't compare to empty string.""" + + id = "empty-string-compare" + description = ( + 'Use ``when: var|length > 0`` rather than ``when: var != ""`` (or ' + 'conversely ``when: var|length == 0`` rather than ``when: var == ""``)' + ) + severity = "HIGH" + tags = ["idiom", "opt-in"] + version_added = "v4.0.0" + + empty_string_compare = re.compile("[=!]= ?(\"{2}|'{2})") + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + for k, v, _ in nested_items_path(task): + if k == "when": + if isinstance(v, str): + if self.empty_string_compare.search(v): + return True + elif isinstance(v, bool): + pass + else: + for item in v: + if isinstance(item, str) and self.empty_string_compare.search( + item + ): + return True + + return False + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.testing import RunFromText # pylint: disable=ungrouped-imports + + SUCCESS_PLAY = """ +- hosts: all + tasks: + - name: Shut down + shell: | + /sbin/shutdown -t now + echo $var == "" + when: ansible_os_family + - name: Shut down + shell: | + /sbin/shutdown -t now + echo $var == "" + when: [ansible_os_family] +""" + + FAIL_PLAY = """ +- hosts: all + tasks: + - name: Shut down + command: /sbin/shutdown -t now + when: ansible_os_family == "" + - name: Shut down + command: /sbin/shutdown -t now + when: ansible_os_family !="" + - name: Shut down + command: /sbin/shutdown -t now + when: False +""" + + @pytest.mark.parametrize( + "rule_runner", (ComparisonToEmptyStringRule,), indirect=["rule_runner"] + ) + def test_rule_empty_string_compare_fail(rule_runner: RunFromText) -> None: + """Test rule matches.""" + results = rule_runner.run_playbook(FAIL_PLAY) + assert len(results) == 2 + for result in results: + assert result.message == ComparisonToEmptyStringRule().shortdesc + + @pytest.mark.parametrize( + "rule_runner", (ComparisonToEmptyStringRule,), indirect=["rule_runner"] + ) + def test_rule_empty_string_compare_pass(rule_runner: RunFromText) -> None: + """Test rule matches.""" + results = rule_runner.run_playbook(SUCCESS_PLAY) + assert len(results) == 0, results diff --git a/src/ansiblelint/rules/fqcn.md b/src/ansiblelint/rules/fqcn.md new file mode 100644 index 0000000..fc96ed2 --- /dev/null +++ b/src/ansiblelint/rules/fqcn.md @@ -0,0 +1,76 @@ +# fqcn + +This rule checks for fully-qualified collection names (FQCN) in Ansible content. + +Declaring an FQCN ensures that an action uses code from the correct namespace. 
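
The `length`-filter forms recommended by the `empty-string-compare` rule above look like this in a task (a sketch reusing the rule's shutdown example):

```yaml
---
- name: Example playbook
  hosts: all
  tasks:
    - name: Shut down only when the fact is non-empty
      ansible.builtin.command: /sbin/shutdown -t now
      when: ansible_os_family | length > 0 # <- instead of ansible_os_family != ""
```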
+This avoids ambiguity and conflicts that can cause operations to fail or produce +unexpected results. + +The `fqcn` rule has the following checks: + +- `fqcn[action]` - Use FQCN for module actions, such ... +- `fqcn[action-core]` - Checks for FQCNs from the `ansible.legacy` or + `ansible.builtin` collection. +- `fqcn[canonical]` - You should use canonical module name ... instead of ... +- `fqcn[keyword]` - Avoid `collections` keyword by using FQCN for all plugins, + modules, roles and playbooks. + +!!! note + + In most cases you should declare the `ansible.builtin` collection for internal Ansible actions. + You should declare the `ansible.legacy` collection if you use local overrides with actions, such with as the ``shell`` module. + +!!! warning + + This rule does not take [`collections` keyword](https://docs.ansible.com/ansible/latest/collections_guide/collections_using_playbooks.html#simplifying-module-names-with-the-collections-keyword) into consideration for resolving content. + The `collections` keyword provided a temporary mechanism transitioning to Ansible 2.9. + You should rewrite any content that uses the `collections:` key and avoid it where possible. + +## Canonical module names + +Canonical module names are also known as **resolved module names** and they are +to be preferred for most cases. Many Ansible modules have multiple aliases and +redirects, as these were created over time while the content was refactored. +Still, all of them do finally resolve to the same module name, but not without +adding some performance overhead. As very old aliases are at some point removed, +it makes to just refresh the content to make it point to the current canonical +name. + +The only exception for using a canonical name is if your code still needs to be +compatible with a very old version of Ansible, one that does not know how to +resolve that name. If you find yourself in such a situation, feel free to add +this rule to the ignored list. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Create an SSH connection + shell: ssh ssh_user@{{ ansible_ssh_host }} # <- Does not use the FQCN for the shell module. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook (1st solution) + hosts: all + tasks: + - name: Create an SSH connection + # Use the FQCN for the legacy shell module and allow local overrides. + ansible.legacy.shell: + ssh ssh_user@{{ ansible_ssh_host }} -o IdentityFile=path/to/my_rsa +``` + +```yaml +--- +- name: Example playbook (2nd solution) + hosts: all + tasks: + - name: Create an SSH connection + # Use the FQCN for the builtin shell module. 
+ ansible.builtin.shell: ssh ssh_user@{{ ansible_ssh_host }} +``` diff --git a/src/ansiblelint/rules/fqcn.py b/src/ansiblelint/rules/fqcn.py new file mode 100644 index 0000000..64f4ee3 --- /dev/null +++ b/src/ansiblelint/rules/fqcn.py @@ -0,0 +1,202 @@ +"""Rule definition for usage of fully qualified collection names for builtins.""" +from __future__ import annotations + +import logging +import sys +from typing import Any + +from ansible.plugins.loader import module_loader + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule, RulesCollection + +_logger = logging.getLogger(__name__) + +builtins = [ + # spell-checker:disable + "add_host", + "apt", + "apt_key", + "apt_repository", + "assemble", + "assert", + "async_status", + "blockinfile", + "command", + "copy", + "cron", + "debconf", + "debug", + "dnf", + "dpkg_selections", + "expect", + "fail", + "fetch", + "file", + "find", + "gather_facts", + "get_url", + "getent", + "git", + "group", + "group_by", + "hostname", + "import_playbook", + "import_role", + "import_tasks", + "include", + "include_role", + "include_tasks", + "include_vars", + "iptables", + "known_hosts", + "lineinfile", + "meta", + "package", + "package_facts", + "pause", + "ping", + "pip", + "raw", + "reboot", + "replace", + "rpm_key", + "script", + "service", + "service_facts", + "set_fact", + "set_stats", + "setup", + "shell", + "slurp", + "stat", + "subversion", + "systemd", + "sysvinit", + "tempfile", + "template", + "unarchive", + "uri", + "user", + "wait_for", + "wait_for_connection", + "yum", + "yum_repository", + # spell-checker:enable +] + + +class FQCNBuiltinsRule(AnsibleLintRule): + """Use FQCN for builtin actions.""" + + id = "fqcn" + severity = "MEDIUM" + description = ( + "Check whether actions are using using full qualified collection names." + ) + tags = ["formatting"] + version_added = "v6.8.0" + module_aliases: dict[str, str] = {"block/always/rescue": "block/always/rescue"} + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + result = [] + module = task["action"]["__ansible_module_original__"] + + if module not in self.module_aliases: + loaded_module = module_loader.find_plugin_with_context(module) + target = loaded_module.resolved_fqcn + self.module_aliases[module] = target + if target is None: + _logger.warning("Unable to resolve FQCN for module %s", module) + self.module_aliases[module] = module + return [] + if target not in self.module_aliases: + self.module_aliases[target] = target + + if module != self.module_aliases[module]: + module_alias = self.module_aliases[module] + if module_alias.startswith("ansible.builtin"): + legacy_module = module_alias.replace( + "ansible.builtin.", "ansible.legacy.", 1 + ) + if module != legacy_module: + result.append( + self.create_matcherror( + message=f"Use FQCN for builtin module actions ({module}).", + details=f"Use `{module_alias}` or `{legacy_module}` instead.", + filename=file, + linenumber=task["__line__"], + tag="fqcn[action-core]", + ) + ) + else: + if module.count(".") < 2: + result.append( + self.create_matcherror( + message=f"Use FQCN for module actions, such `{self.module_aliases[module]}`.", + details=f"Action `{module}` is not FQCN.", + filename=file, + linenumber=task["__line__"], + tag="fqcn[action]", + ) + ) + # TODO(ssbarnea): Remove the c.g. and c.n. exceptions from here once + # community team is flattening these. 
+ # See: https://github.com/ansible-community/community-topics/issues/147 + elif not module.startswith("community.general.") or module.startswith( + "community.network." + ): + result.append( + self.create_matcherror( + message=f"You should use canonical module name `{self.module_aliases[module]}` instead of `{module}`.", + filename=file, + linenumber=task["__line__"], + tag="fqcn[canonical]", + ) + ) + return result + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + if file.kind != "playbook": + return [] + if "collections" in data: + return [ + self.create_matcherror( + message="Avoid `collections` keyword by using FQCN for all plugins, modules, roles and playbooks.", + linenumber=data[LINE_NUMBER_KEY], + tag="fqcn[keyword]", + filename=file, + ) + ] + return [] + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + def test_fqcn_builtin_fail() -> None: + """Test rule matches.""" + collection = RulesCollection() + collection.register(FQCNBuiltinsRule()) + success = "examples/playbooks/rule-fqcn-fail.yml" + results = Runner(success, rules=collection).run() + assert len(results) == 3 + assert results[0].tag == "fqcn[keyword]" + assert "Avoid `collections` keyword" in results[0].message + assert results[1].tag == "fqcn[action-core]" + assert "Use FQCN for builtin module actions" in results[1].message + assert results[2].tag == "fqcn[action]" + assert "Use FQCN for module actions, such" in results[2].message + + def test_fqcn_builtin_pass() -> None: + """Test rule does not match.""" + collection = RulesCollection() + collection.register(FQCNBuiltinsRule()) + success = "examples/playbooks/rule-fqcn-pass.yml" + results = Runner(success, rules=collection).run() + assert len(results) == 0, results diff --git a/src/ansiblelint/rules/galaxy.md b/src/ansiblelint/rules/galaxy.md new file mode 100644 index 0000000..b974d87 --- /dev/null +++ b/src/ansiblelint/rules/galaxy.md @@ -0,0 +1,108 @@ +# galaxy + +This rule identifies if the collection version mentioned in galaxy.yml is ideal +in terms of the version number being greater than or equal to `1.0.0`. + +This rule looks for a changelog file in expected locations, detailed below in +the Changelog Details section. + +This rule checks to see if the `galaxy.yml` file includes one of the required +tags for certification on Automation Hub. Additional custom tags can be added, +but one or more of these tags must be present for certification. + +The tag list is as follows: `application`, `cloud`,`database`, `infrastructure`, +`linux`, `monitoring`, `networking`, `security`,`storage`, `tools`, `windows`. + +This rule can produce messages such: + +- `galaxy[version-missing]` - `galaxy.yaml` should have version tag. +- `galaxy[version-incorrect]` - collection version should be greater than or + equal to `1.0.0` +- `galaxy[no-changelog]` - collection is missing a changelog file in expected + locations. +- `galaxy[tags]` - `galaxy.yaml` must have one of the required tags: + `application`, `cloud`, `database`, `infrastructure`, `linux`, `monitoring`, + `networking`, `security`, `storage`, `tools`, `windows`. + +If you want to ignore some of the messages above, you can add any of them to the +`ignore_list`. 
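
For instance, a minimal `.ansible-lint` configuration that downgrades or skips individual `galaxy` messages could look like this sketch, using the standard `warn_list` and `skip_list` keys (pick whichever tags you want to ignore):

```yaml
# .ansible-lint (sketch)
---
warn_list:
  - galaxy[no-changelog] # report as a warning instead of an error
skip_list:
  - galaxy[version-incorrect] # do not report at all
```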
+ +## Problematic code + +```yaml +# galaxy.yml +--- +name: foo +namespace: bar +version: 0.2.3 # <-- collection version should be >= 1.0.0 +authors: + - John +readme: ../README.md +description: "..." +``` + +## Correct code + +```yaml +# galaxy.yml +--- +name: foo +namespace: bar +version: 1.0.0 +authors: + - John +readme: ../README.md +description: "..." +``` + +# Changelog Details + +This rule expects a `CHANGELOG.md` or `.rst` file in the collection root or a +`changelogs/changelog.yaml` file. + +If a `changelogs/changelog.yaml` file exists, the schema will be checked. + +## Minimum required changelog.yaml file + +```yaml +# changelog.yaml +--- +releases: {} +``` + +# Required Tag Details + +## Problematic code + +```yaml +# galaxy.yml +--- +namespace: bar +name: foo +version: 1.0.0 +authors: + - John +readme: ../README.md +description: "..." +license: + - Apache-2.0 +repository: https://github.com/ORG/REPO_NAME +``` + +## Correct code + +```yaml +# galaxy.yml +--- +namespace: bar +name: foo +version: 1.0.0 +authors: + - John +readme: ../README.md +description: "..." +license: + - Apache-2.0 +repository: https://github.com/ORG/REPO_NAME +tags: [networking, test_tag, test_tag_2] +``` diff --git a/src/ansiblelint/rules/galaxy.py b/src/ansiblelint/rules/galaxy.py new file mode 100644 index 0000000..cdaf4ed --- /dev/null +++ b/src/ansiblelint/rules/galaxy.py @@ -0,0 +1,232 @@ +"""Implementation of GalaxyRule.""" +from __future__ import annotations + +import os +import sys +from functools import total_ordering +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.errors import MatchError +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class GalaxyRule(AnsibleLintRule): + """Rule for checking collection version is greater than 1.0.0 and checking for changelog.""" + + id = "galaxy" + description = "Confirm via galaxy.yml file if collection version is greater than or equal to 1.0.0 and check for changelog." + severity = "MEDIUM" + tags = ["metadata"] + version_added = "v6.11.0 (last update)" + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + """Return matches found for a specific play (entry in playbook).""" + if file.kind != "galaxy": # type: ignore + return [] + + # Defined by Automation Hub Team and Partner Engineering + required_tag_list = [ + "application", + "cloud", + "database", + "infrastructure", + "linux", + "monitoring", + "networking", + "security", + "storage", + "tools", + "windows", + ] + + results = [] + + base_path = os.path.split(str(file.abspath))[0] + changelog_found = 0 + changelog_paths = [ + os.path.join(base_path, "changelogs", "changelog.yaml"), + os.path.join(base_path, "CHANGELOG.rst"), + os.path.join(base_path, "CHANGELOG.md"), + ] + + for path in changelog_paths: + if os.path.isfile(path): + changelog_found = 1 + + galaxy_tag_list = data.get("tags", None) + + # Changelog Check - building off Galaxy rule as there is no current way to check + # for a nonexistent file + if not changelog_found: + results.append( + self.create_matcherror( + message="No changelog found. Please add a changelog file. 
Refer to the galaxy.md file for more info.", + tag="galaxy[no-changelog]", + filename=file, + ) + ) + + # Checking if galaxy.yml contains one or more required tags for certification + if not galaxy_tag_list or not any( + tag in required_tag_list for tag in galaxy_tag_list + ): + results.append( + self.create_matcherror( + message=( + f"galaxy.yaml must have one of the required tags: {required_tag_list}" + ), + tag="galaxy[tags]", + filename=file, + ) + ) + + if "version" not in data: + results.append( + self.create_matcherror( + message="galaxy.yaml should have version tag.", + linenumber=data[LINE_NUMBER_KEY], + tag="galaxy[version-missing]", + filename=file, + ) + ) + return results + # returning here as it does not make sense + # to continue for version check below + + version = data.get("version") + if Version(version) < Version("1.0.0"): + results.append( + self.create_matcherror( + message="collection version should be greater than or equal to 1.0.0", + # pylint: disable=protected-access + linenumber=version._line_number, + tag="galaxy[version-incorrect]", + filename=file, + ) + ) + + return results + + +@total_ordering +class Version: + """Simple class to compare arbitrary versions.""" + + def __init__(self, version_string: str): + """Construct a Version object.""" + self.components = version_string.split(".") + + def __eq__(self, other: object) -> bool: + """Implement equality comparison.""" + try: + other = _coerce(other) + except NotImplementedError: + return NotImplemented + + return self.components == other.components + + def __lt__(self, other: Version) -> bool: + """Implement lower-than operation.""" + other = _coerce(other) + + return self.components < other.components + + +def _coerce(other: object) -> Version: + if isinstance(other, str): + other = Version(other) + if isinstance(other, (int, float)): + other = Version(str(other)) + if isinstance(other, Version): + return other + raise NotImplementedError(f"Unable to coerce object type {type(other)} to Version") + + +if "pytest" in sys.modules: # noqa: C901 + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner + + def test_galaxy_collection_version_positive() -> None: + """Positive test for collection version in galaxy.""" + collection = RulesCollection() + collection.register(GalaxyRule()) + success = "examples/collection/galaxy.yml" + good_runner = Runner(success, rules=collection) + assert [] == good_runner.run() + + def test_galaxy_collection_version_negative() -> None: + """Negative test for collection version in galaxy.""" + collection = RulesCollection() + collection.register(GalaxyRule()) + failure = "examples/meta/galaxy.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 1 + + def test_galaxy_no_collection_version() -> None: + """Test for no collection version in galaxy.""" + collection = RulesCollection() + collection.register(GalaxyRule()) + failure = "examples/no_collection_version/galaxy.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 1 + + def test_changelog_present() -> None: + """Positive test for finding a changelog.""" + collection = RulesCollection() + collection.register(GalaxyRule()) + good_runner = Runner("examples/collection/galaxy.yml", rules=collection) + assert [] == good_runner.run() + + def test_changelog_missing() -> None: + """Negative test for finding a changelog.""" + collection = RulesCollection() + 
collection.register(GalaxyRule()) + bad_runner = Runner("examples/no_changelog/galaxy.yml", rules=collection) + result = bad_runner.run() + assert len(result) == 1 + for item in result: + assert item.tag == "galaxy[no-changelog]" + + def test_version_class() -> None: + """Test for version class.""" + v = Version("1.0.0") + assert v == Version("1.0.0") + assert v != NotImplemented + + def test_coerce() -> None: + """Test for _coerce function.""" + assert _coerce("1.0") == Version("1.0") + assert _coerce(1.0) == Version("1.0") + expected = "Unable to coerce object type" + with pytest.raises(NotImplementedError, match=expected): + _coerce(type(Version)) + + def test_galaxy_tags_pass() -> None: + """Test for required tags.""" + collection = RulesCollection() + collection.register(GalaxyRule()) + bad_runner = Runner( + "examples/galaxy_no_required_tags/pass/galaxy.yml", rules=collection + ) + result = bad_runner.run() + assert len(result) == 0 + + def test_galaxy_tags_fail() -> None: + """Test for required tags.""" + collection = RulesCollection() + collection.register(GalaxyRule()) + bad_runner = Runner( + "examples/galaxy_no_required_tags/fail/galaxy.yml", rules=collection + ) + result = bad_runner.run() + assert len(result) == 1 + for item in result: + assert item.tag == "galaxy[tags]" diff --git a/src/ansiblelint/rules/ignore_errors.md b/src/ansiblelint/rules/ignore_errors.md new file mode 100644 index 0000000..cb17774 --- /dev/null +++ b/src/ansiblelint/rules/ignore_errors.md @@ -0,0 +1,61 @@ +# ignore-errors + +This rule checks that playbooks do not use the `ignore_errors` directive to ignore all errors. +Ignoring all errors in a playbook hides actual failures, incorrectly mark tasks as failed, and result in unexpected side effects and behavior. + +Instead of using the `ignore_errors: true` directive, you should do the following: + +- Ignore errors only when using the `{{ ansible_check_mode }}` variable. +- Use `register` to register errors. +- Use `failed_when:` and specify acceptable error conditions. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Run apt-get update + ansible.builtin.command: apt-get update + ignore_errors: true # <- Ignores all errors, including important failures. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Run apt-get update + ansible.builtin.command: apt-get update + ignore_errors: "{{ ansible_check_mode }}" # <- Ignores errors in check mode. +``` + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Run apt-get update + ansible.builtin.command: apt-get update + ignore_errors: true + register: ignore_errors_register # <- Stores errors and failures for evaluation. +``` + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Disable apport + become: "yes" + lineinfile: + line: "enabled=0" + dest: /etc/default/apport + mode: 0644 + state: present + register: default_apport + failed_when: default_apport.rc !=0 and not default_apport.rc == 257 # <- Defines conditions that constitute a failure. 
+``` diff --git a/src/ansiblelint/rules/ignore_errors.py b/src/ansiblelint/rules/ignore_errors.py new file mode 100644 index 0000000..e9f7c40 --- /dev/null +++ b/src/ansiblelint/rules/ignore_errors.py @@ -0,0 +1,130 @@ +"""IgnoreErrorsRule used with ansible-lint.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class IgnoreErrorsRule(AnsibleLintRule): + """Use failed_when and specify error conditions instead of using ignore_errors.""" + + id = "ignore-errors" + description = ( + "Instead of ignoring all errors, ignore the errors only when using ``{{ ansible_check_mode }}``, " + "register the errors using ``register``, " + "or use ``failed_when:`` and specify acceptable error conditions " + "to reduce the risk of ignoring important failures." + ) + severity = "LOW" + tags = ["unpredictability"] + version_added = "v5.0.7" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if ( + task.get("ignore_errors") + and task.get("ignore_errors") != "{{ ansible_check_mode }}" + and not task.get("register") + ): + return True + + return False + + +if "pytest" in sys.modules: + import pytest + + from ansiblelint.testing import RunFromText # pylint: disable=ungrouped-imports + + IGNORE_ERRORS_TRUE = """ +- hosts: all + tasks: + - name: Run apt-get update + command: apt-get update + ignore_errors: true +""" + + IGNORE_ERRORS_FALSE = """ +- hosts: all + tasks: + - name: Run apt-get update + command: apt-get update + ignore_errors: false +""" + + IGNORE_ERRORS_CHECK_MODE = """ +- hosts: all + tasks: + - name: Run apt-get update + command: apt-get update + ignore_errors: "{{ ansible_check_mode }}" +""" + + IGNORE_ERRORS_REGISTER = """ +- hosts: all + tasks: + - name: Run apt-get update + command: apt-get update + ignore_errors: true + register: ignore_errors_register +""" + + FAILED_WHEN = """ +- hosts: all + tasks: + - name: Disable apport + become: 'yes' + lineinfile: + line: "enabled=0" + dest: /etc/default/apport + mode: 0644 + state: present + register: default_apport + failed_when: default_apport.rc !=0 and not default_apport.rc == 257 +""" + + @pytest.mark.parametrize( + "rule_runner", (IgnoreErrorsRule,), indirect=["rule_runner"] + ) + def test_ignore_errors_true(rule_runner: RunFromText) -> None: + """The task uses ignore_errors.""" + results = rule_runner.run_playbook(IGNORE_ERRORS_TRUE) + assert len(results) == 1 + + @pytest.mark.parametrize( + "rule_runner", (IgnoreErrorsRule,), indirect=["rule_runner"] + ) + def test_ignore_errors_false(rule_runner: RunFromText) -> None: + """The task uses ignore_errors: false, oddly enough.""" + results = rule_runner.run_playbook(IGNORE_ERRORS_FALSE) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (IgnoreErrorsRule,), indirect=["rule_runner"] + ) + def test_ignore_errors_check_mode(rule_runner: RunFromText) -> None: + """The task uses ignore_errors: "{{ ansible_check_mode }}".""" + results = rule_runner.run_playbook(IGNORE_ERRORS_CHECK_MODE) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (IgnoreErrorsRule,), indirect=["rule_runner"] + ) + def test_ignore_errors_register(rule_runner: RunFromText) -> None: + """The task uses ignore_errors: but output is registered and managed.""" + results = rule_runner.run_playbook(IGNORE_ERRORS_REGISTER) + assert len(results) == 0 + + @pytest.mark.parametrize( + 
"rule_runner", (IgnoreErrorsRule,), indirect=["rule_runner"] + ) + def test_failed_when(rule_runner: RunFromText) -> None: + """Instead of ignore_errors, this task uses failed_when.""" + results = rule_runner.run_playbook(FAILED_WHEN) + assert len(results) == 0 diff --git a/src/ansiblelint/rules/inline_env_var.md b/src/ansiblelint/rules/inline_env_var.md new file mode 100644 index 0000000..bc83f7e --- /dev/null +++ b/src/ansiblelint/rules/inline_env_var.md @@ -0,0 +1,38 @@ +# inline-env-var + +This rule checks that playbooks do not set environment variables in the `ansible.builtin.command` module. + +You should set environment variables with the `ansible.builtin.shell` module or the `environment` keyword. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Set environment variable + ansible.builtin.command: MY_ENV_VAR=my_value # <- Sets an environment variable in the command module. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Set environment variable + ansible.builtin.shell: echo $MY_ENV_VAR + environment: + MY_ENV_VAR: my_value # <- Sets an environment variable with the environment keyword. +``` + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Set environment variable + ansible.builtin.shell: MY_ENV_VAR=my_value # <- Sets an environment variable with the shell module. +``` diff --git a/src/ansiblelint/rules/inline_env_var.py b/src/ansiblelint/rules/inline_env_var.py new file mode 100644 index 0000000..7fd2aa0 --- /dev/null +++ b/src/ansiblelint/rules/inline_env_var.py @@ -0,0 +1,74 @@ +"""Implementation of inside-env-var rule.""" +# Copyright (c) 2016 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import FILENAME_KEY, LINE_NUMBER_KEY +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.utils import get_first_cmd_arg + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class EnvVarsInCommandRule(AnsibleLintRule): + """Command module does not accept setting environment variables inline.""" + + id = "inline-env-var" + description = ( + "Use ``environment:`` to set environment variables " + "or use ``shell`` module which accepts both" + ) + severity = "VERY_HIGH" + tags = ["command-shell", "idiom"] + version_added = "historic" + + expected_args = [ + "chdir", + "creates", + "executable", + "removes", + "stdin", + "warn", + "stdin_add_newline", + "strip_empty_ends", + "cmd", + "__ansible_module__", + "__ansible_module_original__", + "__ansible_arguments__", + LINE_NUMBER_KEY, + FILENAME_KEY, + ] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if task["action"]["__ansible_module__"] in ["command"]: + first_cmd_arg = get_first_cmd_arg(task) + if not first_cmd_arg: + return False + + return any( + [arg not in self.expected_args for arg in task["action"]] + + ["=" in first_cmd_arg] + ) + return False diff --git a/src/ansiblelint/rules/jinja.md b/src/ansiblelint/rules/jinja.md new file mode 100644 index 0000000..8e1732e --- /dev/null +++ b/src/ansiblelint/rules/jinja.md @@ -0,0 +1,55 @@ +# jinja + +This rule can report problems related to jinja2 string templates. The current +version can report: + +- `jinja[spacing]` when there are no spaces between variables + and operators, including filters, like `{{ var_name | filter }}`. This + improves readability and makes it less likely to introduce typos. +- `jinja[invalid]` when the jinja2 template is invalid, like `{{ {{ '1' }} }}`, + which would result in a runtime error if you try to use it with Ansible, even + if it does pass the Ansible syntax check. + +As jinja2 syntax is closely following Python one we aim to follow +[black](https://black.readthedocs.io/en/stable/) formatting rules. If you are +curious how black would reformat a small sniped feel free to visit +[online black formatter](https://black.vercel.app/) site. Keep in mind to not +include the entire jinja2 template, so instead of `{{ 1+2==3 }}`, do paste +only `1+2==3`. + +In ansible, `changed_when`, `failed_when`, `until`, `when` are considered to +use implicit jinja2 templating, meaning that they do not require `{{ }}`. Our +rule will suggest the removal of the braces for these fields. + +## Problematic code + +```yaml +--- +- name: Some task + vars: + foo: "{{some|dict2items}}" # <-- jinja[spacing] + bar: "{{ & }}" # <-- jinja[invalid] + when: "{{ foo | bool }}" # <-- jinja[spacing] - 'when' has implicit templating +``` + +## Correct code + +```yaml +--- +- name: Some task + vars: + foo: "{{ some | dict2items }}" + bar: "{{ '&' }}" + when: foo | bool +``` + +## Current limitations + +In its current form, this rule presents the following limitations: + +- Jinja2 blocks that have newlines in them will not be reformatted because we + consider that the user deliberately wanted to format them in a particular way. +- Jinja2 blocks that use tilde as a binary operation are ignored because black + does not support tilde as a binary operator. Example: `{{ a ~ b }}`. +- Jinja2 blocks that use dot notation with numbers are ignored because python + and black do not allow it. 
Example: `{{ foo.0.bar }}` diff --git a/src/ansiblelint/rules/jinja.py b/src/ansiblelint/rules/jinja.py new file mode 100644 index 0000000..8057fd4 --- /dev/null +++ b/src/ansiblelint/rules/jinja.py @@ -0,0 +1,675 @@ +"""Rule for checking content of jinja template strings.""" +from __future__ import annotations + +import logging +import re +import sys +from collections import namedtuple +from typing import TYPE_CHECKING, Any + +import black +import jinja2 +from ansible.errors import AnsibleError, AnsibleParserError +from ansible.parsing.yaml.objects import AnsibleUnicode +from jinja2.exceptions import TemplateSyntaxError + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.skip_utils import get_rule_skips_from_line +from ansiblelint.utils import parse_yaml_from_file, template +from ansiblelint.yaml_utils import deannotate, nested_items_path + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + + +_logger = logging.getLogger(__package__) +KEYWORDS_WITH_IMPLICIT_TEMPLATE = ("changed_when", "failed_when", "until", "when") + +Token = namedtuple("Token", "lineno token_type value") + +ignored_re = re.compile( + "|".join( + [ + r"^Object of type method is not JSON serializable", + r"^Unexpected templating type error occurred on", + r"^obj must be a list of dicts or a nested dict$", + r"^the template file (.*) could not be found for the lookup$", + r"could not locate file in lookup", + r"unable to locate collection", + ] + ) +) + + +class JinjaRule(AnsibleLintRule): + """Rule that looks inside jinja2 templates.""" + + id = "jinja" + severity = "LOW" + tags = ["formatting"] + version_added = "v6.5.0" + _ansible_error_re = re.compile( + r"^(?P<error>.*): (?P<detail>.*)\. String: (?P<string>.*)$", flags=re.MULTILINE + ) + + env = jinja2.Environment(trim_blocks=False) + _tag2msg = { + "invalid": "Syntax error in jinja2 template: {value}", + "spacing": "Jinja2 spacing could be improved: {value} -> {reformatted}", + } + + def _msg(self, tag: str, value: str, reformatted: str) -> str: + """Generate error message.""" + return self._tag2msg[tag].format(value=value, reformatted=reformatted) + + # pylint: disable=too-many-branches,too-many-locals + def matchtask( # noqa: C901 + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + result = [] + try: + for key, v, path in nested_items_path( + task, + ignored_keys=("block", "ansible.builtin.block", "ansible.legacy.block"), + ): + if isinstance(v, str): + try: + template( + basedir=file.dir if file else ".", + value=v, + variables=deannotate(task.get("vars", {})), + fail_on_error=True, # we later decide which ones to ignore or not + ) + # ValueError RepresenterError + except AnsibleError as exc: + bypass = False + orig_exc = ( + exc.orig_exc if getattr(exc, "orig_exc", None) else exc + ) + orig_exc_message = getattr(orig_exc, "message", str(orig_exc)) + match = self._ansible_error_re.match( + getattr(orig_exc, "message", str(orig_exc)) + ) + if ignored_re.match(orig_exc_message): + bypass = True + elif isinstance(orig_exc, AnsibleParserError): + # "An unhandled exception occurred while running the lookup plugin '...'. Error was a <class 'ansible.errors.AnsibleParserError'>, original message: Invalid filename: 'None'. Invalid filename: 'None'" + + # An unhandled exception occurred while running the lookup plugin 'template'. 
Error was a <class 'ansible.errors.AnsibleError'>, original message: the template file ... could not be found for the lookup. the template file ... could not be found for the lookup + + # ansible@devel (2.14) new behavior: + # AnsibleError(TemplateSyntaxError): template error while templating string: Could not load "ipwrap": 'Invalid plugin FQCN (ansible.netcommon.ipwrap): unable to locate collection ansible.netcommon'. String: Foo {{ buildset_registry.host | ipwrap }}. Could not load "ipwrap": 'Invalid plugin FQCN (ansible.netcommon.ipwrap): unable to locate collection ansible.netcommon' + bypass = True + elif ( + isinstance(orig_exc, (AnsibleError, TemplateSyntaxError)) + and match + ): + error = match.group("error") + detail = match.group("detail") + # string = match.group("string") + if error.startswith( + "template error while templating string" + ): + bypass = False + elif detail.startswith("unable to locate collection"): + _logger.debug("Ignored AnsibleError: %s", exc) + bypass = True + else: + bypass = False + elif re.match(r"^lookup plugin (.*) not found$", exc.message): + # lookup plugin 'template' not found + bypass = True + + # AnsibleFilterError: 'obj must be a list of dicts or a nested dict' + # AnsibleError: template error while templating string: expected token ':', got '}'. String: {{ {{ '1' }} }} + # AnsibleError: template error while templating string: unable to locate collection ansible.netcommon. String: Foo {{ buildset_registry.host | ipwrap }} + if not bypass: + result.append( + self.create_matcherror( + message=str(exc), + linenumber=_get_error_line(task, path), + filename=file, + tag=f"{self.id}[invalid]", + ) + ) + continue + reformatted, details, tag = self.check_whitespace( + v, key=key, lintable=file + ) + if reformatted != v: + result.append( + self.create_matcherror( + message=self._msg( + tag=tag, value=v, reformatted=reformatted + ), + linenumber=_get_error_line(task, path), + details=details, + filename=file, + tag=f"{self.id}[{tag}]", + ) + ) + except Exception as exc: + _logger.info("Exception in JinjaRule.matchtask: %s", exc) + raise + return result + + def matchyaml(self, file: Lintable) -> list[MatchError]: + """Return matches for variables defined in vars files.""" + data: dict[str, Any] = {} + raw_results: list[MatchError] = [] + results: list[MatchError] = [] + + if str(file.kind) == "vars": + data = parse_yaml_from_file(str(file.path)) + # pylint: disable=unused-variable + for key, v, path in nested_items_path(data): + if isinstance(v, AnsibleUnicode): + reformatted, details, tag = self.check_whitespace( + v, key=key, lintable=file + ) + if reformatted != v: + results.append( + self.create_matcherror( + message=self._msg( + tag=tag, value=v, reformatted=reformatted + ), + linenumber=v.ansible_pos[1], + details=details, + filename=file, + tag=f"{self.id}[{tag}]", + ) + ) + if raw_results: + lines = file.content.splitlines() + for match in raw_results: + # linenumber starts with 1, not zero + skip_list = get_rule_skips_from_line(lines[match.linenumber - 1]) + if match.rule.id not in skip_list and match.tag not in skip_list: + results.append(match) + else: + results.extend(super().matchyaml(file)) + return results + + def lex(self, text: str) -> list[Token]: + """Parse jinja template.""" + # https://github.com/pallets/jinja/issues/1711 + self.env.keep_trailing_newline = True + + self.env.lstrip_blocks = False + self.env.trim_blocks = False + tokens = [ + Token(lineno=t[0], token_type=t[1], value=t[2]) for t in self.env.lex(text) + ] + new_text = 
self.unlex(tokens) + if text != new_text: + _logger.debug( + "Unable to perform full roundtrip lex-unlex on jinja template (expected when '-' modifier is used): {text} -> {new_text}" + ) + return tokens + + def unlex(self, tokens: list[Token]) -> str: + """Return original text by compiling the lex output.""" + result = "" + last_lineno = 1 + last_value = "" + for lineno, _, value in tokens: + if lineno > last_lineno and "\n" not in last_value: + result += "\n" + result += value + last_lineno = lineno + last_value = value + return result + + # pylint: disable=too-many-branches,too-many-statements,too-many-locals + def check_whitespace( # noqa: max-complexity: 13 + self, text: str, key: str, lintable: Lintable | None = None + ) -> tuple[str, str, str]: + """Check spacing inside given jinja2 template string. + + We aim to match Python Black formatting rules. + :raises NotImplementedError: On few cases where valid jinja is not valid Python. + + :returns: (string, string, string) reformatted text, detailed error, error tag + """ + + def cook(value: str, implicit: bool = False) -> str: + """Prepare an implicit string for jinja parsing when needed.""" + if not implicit: + return value + if value.startswith("{{") and value.endswith("}}"): + # maybe we should make this an error? + return value + return f"{{{{ {value} }}}}" + + def uncook(value: str, implicit: bool = False) -> str: + """Restore an string to original form when it was an implicit one.""" + if not implicit: + return value + return value[3:-3] + + tokens = [] + details = "" + begin_types = ("variable_begin", "comment_begin", "block_begin") + end_types = ("variable_end", "comment_end", "block_end") + implicit = False + + # implicit templates do not have the {{ }} wrapping + if ( + key in KEYWORDS_WITH_IMPLICIT_TEMPLATE + and lintable + and lintable.kind + in ( + "playbook", + "task", + ) + ): + implicit = True + text = cook(text, implicit=implicit) + + expr_str = None + expr_type = None + verb_skipped = True + lineno = 1 + try: + for token in self.lex(text): + if ( + expr_type + and expr_type.startswith("{%") + and token.token_type in ("name", "whitespace") + and not verb_skipped + ): + # on {% blocks we do not take first word as part of the expression + tokens.append(token) + if token.token_type != "whitespace": + verb_skipped = True + elif token.token_type in begin_types: + tokens.append(token) + expr_type = token.value # such {#, {{, {% + expr_str = "" + verb_skipped = False + elif token.token_type in end_types and expr_str is not None: + # process expression + # pylint: disable=unsupported-membership-test + if isinstance(expr_str, str) and "\n" in expr_str: + raise NotImplementedError() + leading_spaces = " " * (len(expr_str) - len(expr_str.lstrip())) + expr_str = leading_spaces + blacken(expr_str.lstrip()) + if tokens[ + -1 + ].token_type != "whitespace" and not expr_str.startswith(" "): + expr_str = " " + expr_str + if not expr_str.endswith(" "): + expr_str += " " + tokens.append(Token(lineno, "data", expr_str)) + tokens.append(token) + expr_str = None + expr_type = None + elif expr_str is not None: + expr_str += token.value + else: + tokens.append(token) + lineno = token.lineno + + except jinja2.exceptions.TemplateSyntaxError as exc: + return "", str(exc.message), "invalid" + # https://github.com/PyCQA/pylint/issues/7433 - py311 only + # pylint: disable=c-extension-no-member + except (NotImplementedError, black.parsing.InvalidInput) as exc: + # black is not able to recognize all valid jinja2 templates, so we + # just ignore 
InvalidInput errors. + # NotImplementedError is raised internally for expressions with + # newlines, as we decided to not touch them yet. + # These both are documented as known limitations. + _logger.debug("Ignored jinja internal error %s", exc) + return uncook(text, implicit), "", "spacing" + + # finalize + reformatted = self.unlex(tokens) + failed = reformatted != text + reformatted = uncook(reformatted, implicit) + details = ( + f"Jinja2 template rewrite recommendation: `{reformatted}`." + if failed + else "" + ) + return reformatted, details, "spacing" + + +def blacken(text: str) -> str: + """Format Jinja2 template using black.""" + return black.format_str( + text, mode=black.FileMode(line_length=sys.maxsize, string_normalization=False) + ).rstrip("\n") + + +if "pytest" in sys.modules: # noqa: C901 + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.fixture(name="error_expected_lines") + def fixture_error_expected_lines() -> list[int]: + """Return list of expected error lines.""" + return [33, 36, 39, 42, 45, 48, 74] + + # 21 68 + @pytest.fixture(name="lint_error_lines") + def fixture_lint_error_lines() -> list[int]: + """Get VarHasSpacesRules linting results on test_playbook.""" + collection = RulesCollection() + collection.register(JinjaRule()) + lintable = Lintable("examples/playbooks/jinja-spacing.yml") + results = Runner(lintable, rules=collection).run() + return list(map(lambda item: item.linenumber, results)) + + def test_jinja_spacing_playbook( + error_expected_lines: list[int], lint_error_lines: list[int] + ) -> None: + """Ensure that expected error lines are matching found linting error lines.""" + # list unexpected error lines or non-matching error lines + error_lines_difference = list( + set(error_expected_lines).symmetric_difference(set(lint_error_lines)) + ) + assert len(error_lines_difference) == 0 + + def test_jinja_spacing_vars() -> None: + """Ensure that expected error details are matching found linting error details.""" + collection = RulesCollection() + collection.register(JinjaRule()) + lintable = Lintable("examples/playbooks/vars/jinja-spacing.yml") + results = Runner(lintable, rules=collection).run() + + error_expected_lineno = [14, 15, 16, 17, 18, 19, 32] + assert len(results) == len(error_expected_lineno) + for idx, err in enumerate(results): + assert err.linenumber == error_expected_lineno[idx] + + @pytest.mark.parametrize( + ("text", "expected", "tag"), + ( + pytest.param( + "{{-x}}{#a#}{%1%}", + "{{- x }}{# a #}{% 1 %}", + "spacing", + id="add-missing-space", + ), + pytest.param("", "", "spacing", id="1"), + pytest.param("foo", "foo", "spacing", id="2"), + pytest.param("{##}", "{# #}", "spacing", id="3"), + # we want to keep leading spaces as they might be needed for complex multiline jinja files + pytest.param("{# #}", "{# #}", "spacing", id="4"), + pytest.param( + "{{-aaa|xx }}foo\nbar{#some#}\n{%%}", + "{{- aaa | xx }}foo\nbar{# some #}\n{% %}", + "spacing", + id="5", + ), + pytest.param( + "Shell with jinja filter", "Shell with jinja filter", "spacing", id="6" + ), + pytest.param( + "{{{'dummy_2':1}|true}}", + "{{ {'dummy_2': 1} | true }}", + "spacing", + id="7", + ), + pytest.param("{{{foo:{}}}}", "{{ {foo: {}} }}", "spacing", id="8"), + pytest.param( + "{{ {'test': {'subtest': variable}} }}", + "{{ {'test': {'subtest': variable}} }}", + "spacing", + id="9", + ), + pytest.param( + "http://foo.com/{{\n case1 }}", + 
"http://foo.com/{{\n case1 }}", + "spacing", + id="10", + ), + pytest.param("{{foo(123)}}", "{{ foo(123) }}", "spacing", id="11"), + pytest.param("{{ foo(a.b.c) }}", "{{ foo(a.b.c) }}", "spacing", id="12"), + # pytest.param( + # "{{ foo | bool else [ ] }}", + # "{{ foo | bool else [] }}", + # "spacing", + # id="13", + # ), + pytest.param( + "{{foo(x =['server_options'])}}", + "{{ foo(x=['server_options']) }}", + "spacing", + id="14", + ), + pytest.param( + '{{ [ "host", "NA"] }}', '{{ ["host", "NA"] }}', "spacing", id="15" + ), + pytest.param( + "{{ {'dummy_2': {'nested_dummy_1': value_1,\n 'nested_dummy_2': value_2}} |\ncombine(dummy_1) }}", + "{{ {'dummy_2': {'nested_dummy_1': value_1,\n 'nested_dummy_2': value_2}} |\ncombine(dummy_1) }}", + "spacing", + id="17", + ), + pytest.param("{{ & }}", "", "invalid", id="18"), + pytest.param( + "{{ good_format }}/\n{{- good_format }}\n{{- good_format -}}\n", + "{{ good_format }}/\n{{- good_format }}\n{{- good_format -}}\n", + "spacing", + id="19", + ), + pytest.param( + "{{ {'a': {'b': 'x', 'c': y}} }}", + "{{ {'a': {'b': 'x', 'c': y}} }}", + "spacing", + id="20", + ), + pytest.param( + "2*(1+(3-1)) is {{ 2 * {{ 1 + {{ 3 - 1 }}}} }}", + "2*(1+(3-1)) is {{ 2 * {{1 + {{3 - 1}}}} }}", + "spacing", + id="21", + ), + pytest.param( + '{{ "absent"\nif (v is version("2.8.0", ">=")\nelse "present" }}', + "", + "invalid", + id="22", + ), + pytest.param( + '{{lookup("x",y+"/foo/"+z+".txt")}}', + '{{ lookup("x", y + "/foo/" + z + ".txt") }}', + "spacing", + id="23", + ), + pytest.param( + "{{ x | map(attribute='value') }}", + "{{ x | map(attribute='value') }}", + "spacing", + id="24", + ), + pytest.param( + "{{ r(a= 1,b= True,c= 0.0,d= '') }}", + "{{ r(a=1, b=True, c=0.0, d='') }}", + "spacing", + id="25", + ), + pytest.param("{{ r(1,[]) }}", "{{ r(1, []) }}", "spacing", id="26"), + pytest.param( + "{{ lookup([ddd ]) }}", "{{ lookup([ddd]) }}", "spacing", id="27" + ), + pytest.param( + "{{ [ x ] if x is string else x }}", + "{{ [x] if x is string else x }}", + "spacing", + id="28", + ), + pytest.param( + # "{% if a|int <= 8 -%} iptables {%- else -%} iptables-nft {%- endif %}", + "{% if a|int <= 8 -%} iptables {%- else -%} iptables-nft {%- endif %}", + "{% if a | int <= 8 -%} iptables{%- else -%} iptables-nft{%- endif %}", + "spacing", + id="29", + ), + pytest.param( + # "- 2" -> "-2", minus does not get separated when there is no left side + "{{ - 2 }}", + "{{ -2 }}", + "spacing", + id="30", + ), + pytest.param( + # "-2" -> "-2", minus does get an undesired spacing + "{{ -2 }}", + "{{ -2 }}", + "spacing", + id="31", + ), + pytest.param( + # array ranges do not have space added + "{{ foo[2:4] }}", + "{{ foo[2:4] }}", + "spacing", + id="32", + ), + pytest.param( + # array ranges have the extra space removed + "{{ foo[2: 4] }}", + "{{ foo[2:4] }}", + "spacing", + id="33", + ), + pytest.param( + # negative array index + "{{ foo[-1] }}", + "{{ foo[-1] }}", + "spacing", + id="34", + ), + pytest.param( + # negative array index, repair + "{{ foo[- 1] }}", + "{{ foo[-1] }}", + "spacing", + id="35", + ), + pytest.param("{{ a +~'b' }}", "{{ a + ~'b' }}", "spacing", id="36"), + pytest.param( + "{{ (a[: -4] *~ b) }}", "{{ (a[:-4] * ~b) }}", "spacing", id="37" + ), + pytest.param("{{ [a,~ b] }}", "{{ [a, ~b] }}", "spacing", id="38"), + # Not supported yet due to being accepted by black: + pytest.param("{{ item.0.user }}", "{{ item.0.user }}", "spacing", id="39"), + # Not supported by back, while jinja allows ~ to be binary operator: + pytest.param("{{ a ~ b }}", "{{ a ~ b 
}}", "spacing", id="40"), + pytest.param( + "--format='{{'{{'}}.Size{{'}}'}}'", + "--format='{{ '{{' }}.Size{{ '}}' }}'", + "spacing", + id="41", + ), + pytest.param( + "{{ list_one + {{ list_two | max }} }}", + "{{ list_one + {{list_two | max}} }}", + "spacing", + id="42", + ), + pytest.param( + "{{ lookup('file' , '/tmp/non-existent', errors='ignore') }}", + "{{ lookup('file', '/tmp/non-existent', errors='ignore') }}", + "spacing", + id="43", + ), + ), + ) + def test_jinja(text: str, expected: str, tag: str) -> None: + """Tests our ability to spot spacing errors inside jinja2 templates.""" + rule = JinjaRule() + + reformatted, details, returned_tag = rule.check_whitespace( + text, key="name", lintable=Lintable("playbook.yml") + ) + assert tag == returned_tag, details + assert expected == reformatted + + @pytest.mark.parametrize( + ("text", "expected", "tag"), + ( + pytest.param( + "1+2", + "1 + 2", + "spacing", + id="0", + ), + pytest.param( + "- 1", + "-1", + "spacing", + id="1", + ), + # Ensure that we do not choke with double templating on implicit + # and instead we remove them braces. + pytest.param("{{ o | bool }}", "o | bool", "spacing", id="2"), + ), + ) + def test_jinja_implicit(text: str, expected: str, tag: str) -> None: + """Tests our ability to spot spacing errors implicit jinja2 templates.""" + rule = JinjaRule() + # implicit jinja2 are working only inside playbooks and tasks + lintable = Lintable(name="playbook.yml", kind="playbook") + reformatted, details, returned_tag = rule.check_whitespace( + text, key="when", lintable=lintable + ) + assert tag == returned_tag, details + assert expected == reformatted + + @pytest.mark.parametrize( + ("lintable", "matches"), + (pytest.param("examples/playbooks/vars/rule_jinja_vars.yml", 0, id="0"),), + ) + def test_jinja_file(lintable: str, matches: int) -> None: + """Tests our ability to process var filesspot spacing errors.""" + collection = RulesCollection() + collection.register(JinjaRule()) + errs = Runner(lintable, rules=collection).run() + assert len(errs) == matches + for err in errs: + assert isinstance(err, JinjaRule) + assert errs[0].tag == "jinja[invalid]" + assert errs[0].rule.id == "jinja" + + def test_jinja_invalid() -> None: + """Tests our ability to spot spacing errors inside jinja2 templates.""" + collection = RulesCollection() + collection.register(JinjaRule()) + success = "examples/playbooks/rule-jinja-invalid.yml" + errs = Runner(success, rules=collection).run() + assert len(errs) == 2 + assert errs[0].tag == "jinja[spacing]" + assert errs[0].rule.id == "jinja" + assert errs[0].linenumber == 9 + assert errs[1].tag == "jinja[invalid]" + assert errs[1].rule.id == "jinja" + assert errs[1].linenumber == 9 + + def test_jinja_valid() -> None: + """Tests our ability to parse jinja, even when variables may not be defined.""" + collection = RulesCollection() + collection.register(JinjaRule()) + success = "examples/playbooks/rule-jinja-valid.yml" + errs = Runner(success, rules=collection).run() + assert len(errs) == 0 + + +def _get_error_line(task: dict[str, Any], path: list[str | int]) -> int: + """Return error line number.""" + line = task[LINE_NUMBER_KEY] + ctx = task + for _ in path: + ctx = ctx[_] + if LINE_NUMBER_KEY in ctx: + line = ctx[LINE_NUMBER_KEY] + if not isinstance(line, int): + raise RuntimeError("Line number is not an integer") + return line diff --git a/src/ansiblelint/rules/key_order.md b/src/ansiblelint/rules/key_order.md new file mode 100644 index 0000000..378d8a5 --- /dev/null +++ 
b/src/ansiblelint/rules/key_order.md @@ -0,0 +1,63 @@ +# key-order + +This rule recommends reordering key names in ansible content to make +code easier to maintain and less prone to errors. + +Here are some examples of common ordering checks done for tasks and handlers: + +- `name` must always be the first key for plays, tasks and handlers +- on tasks, the `block`, `rescue` and `always` keys must be the last keys, + as this avoids accidental mis-indentation errors between the last task + and the parent level. + +## Problematic code + +```yaml +--- +- hosts: localhost + name: This is a playbook # <-- name key should be the first one + tasks: + - name: A block + block: + - name: Display a message + debug: + msg: "Hello world!" + when: true # <-- when key should be before block +``` + +## Correct code + +```yaml +--- +- name: This is a playbook + hosts: localhost + tasks: + - name: A block + when: true + block: + - name: Display a message + debug: + msg: "Hello world!" +``` + +## Reasoning + +Making decisions about the optimal order of keys for ansible tasks or plays is +no easy task, as there is a huge number of combinations to consider. This is also +the reason why we started with a minimal sorting rule (name to be the first), +and aim to gradually add more fields later, and only when we find proof +that one approach is clearly better than the other. + +### Why can I no longer put `when` after a `block`? + +Remember that in real life, `block/rescue/always` have a habit of +growing due to the number of tasks they host inside, making them exceed what fits on a single screen. This moves the `when` key further away from the rest of the task properties. A `when` on the last task inside the block can +easily be confused with one at the block level, or the reverse. When tasks are +moved from one location to another, there is a real risk of moving the block +level `when` along with them. + +By putting the `when` before the `block`, we avoid that kind of risk. The same risk applies to any simple property at the task level, which is why +we concluded that the block keys must be the last ones. + +Another common practice was to put `tags` as the last property. Still, for the +same reasons, we decided that they should not be put after the block keys either.
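As a rough illustration of the ordering logic described above, here is a minimal standalone sketch (illustrative only, not the rule's actual code; the real implementation follows in `key_order.py`) of a comparator that ranks `name` first, `block`/`rescue`/`always` last, and leaves every other key in between:

```python
# Minimal sketch of the key-order idea (illustrative names, not the rule's code):
# 'name' sorts first, block/rescue/always sort last, other keys keep their order.
import functools

PREFERRED = ("name", None, "block", "rescue", "always")  # None = "any other key"


def sort_index(key: str) -> int:
    """Return the rank of a task key; unknown keys share the None slot."""
    return PREFERRED.index(key) if key in PREFERRED else PREFERRED.index(None)


def compare_keys(key1: str, key2: str) -> int:
    """Classic cmp-style comparator built on the rank above."""
    idx1, idx2 = sort_index(key1), sort_index(key2)
    return (idx1 > idx2) - (idx1 < idx2)


print(sorted(["when", "block", "name", "vars"], key=functools.cmp_to_key(compare_keys)))
# prints: ['name', 'when', 'vars', 'block']
```

Because Python's sort is stable, keys that share the middle slot (`when`, `vars`, module arguments, and so on) keep their original relative order; only the anchored keys move.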
diff --git a/src/ansiblelint/rules/key_order.py b/src/ansiblelint/rules/key_order.py new file mode 100644 index 0000000..5de637c --- /dev/null +++ b/src/ansiblelint/rules/key_order.py @@ -0,0 +1,155 @@ +"""All tasks should be have name come first.""" +from __future__ import annotations + +import functools +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.testing import RunFromText + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + + +SORTER_TASKS = ( + "name", + # "__module__", + # "action", + # "args", + None, # <-- None include all modules that not using action and * + # "when", + # "(loop|loop_|with_).*", + # "notify", + # "tags", + "block", + "rescue", + "always", +) + + +def get_property_sort_index(name: str) -> int: + """Return the index of the property in the sorter.""" + a_index = -1 + for i, v in enumerate(SORTER_TASKS): + if v == name: + return i + if v is None: + a_index = i + return a_index + + +def task_property_sorter(property1: str, property2: str) -> int: + """Sort task properties based on SORTER.""" + v_1 = get_property_sort_index(property1) + v_2 = get_property_sort_index(property2) + return (v_1 > v_2) - (v_1 < v_2) + + +class KeyOrderRule(AnsibleLintRule): + """Ensure specific order of keys in mappings.""" + + id = "key-order" + shortdesc = __doc__ + severity = "LOW" + tags = ["formatting"] + version_added = "v6.6.2" + needs_raw_task = True + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + result = [] + raw_task = task["__raw_task__"] + keys = [key for key in raw_task.keys() if not key.startswith("_")] + sorted_keys = sorted(keys, key=functools.cmp_to_key(task_property_sorter)) + if keys != sorted_keys: + result.append( + self.create_matcherror( + f"You can improve the task key order to: {', '.join(sorted_keys)}", + filename=file, + tag="key-order[task]", + ) + ) + return result + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + PLAY_SUCCESS = """--- +- hosts: localhost + tasks: + - name: Test + command: echo "test" + - name: Test2 + debug: + msg: "Debug without a name" + - name: Flush handlers + meta: flush_handlers + - no_log: true # noqa: key-order + shell: echo hello + name: Task with no_log on top +""" + + @pytest.mark.parametrize("rule_runner", (KeyOrderRule,), indirect=["rule_runner"]) + def test_key_order_task_name_has_name_first_rule_pass( + rule_runner: RunFromText, + ) -> None: + """Test rule matches.""" + results = rule_runner.run_playbook(PLAY_SUCCESS) + assert len(results) == 0 + + @pytest.mark.parametrize("rule_runner", (KeyOrderRule,), indirect=["rule_runner"]) + def test_key_order_task_name_has_name_first_rule_fail( + rule_runner: RunFromText, + ) -> None: + """Test rule matches.""" + results = rule_runner.run("examples/playbooks/rule-key-order-fail.yml") + assert len(results) == 6 + + @pytest.mark.parametrize( + ("properties", "expected"), + ( + pytest.param([], []), + pytest.param(["block", "name"], ["name", "block"]), + pytest.param( + ["block", "name", "action", "..."], ["name", "action", "...", "block"] + ), + ), + ) + def test_key_order_property_sorter( + properties: list[str], expected: list[str] + ) -> None: + """Test the task property sorter.""" + result = sorted(properties, key=functools.cmp_to_key(task_property_sorter)) + assert expected == result + + @pytest.mark.parametrize( + 
("key", "order"), + ( + pytest.param("name", 0), + pytest.param("action", 1), + pytest.param("foobar", SORTER_TASKS.index(None)), + pytest.param("block", len(SORTER_TASKS) - 3), + pytest.param("rescue", len(SORTER_TASKS) - 2), + pytest.param("always", len(SORTER_TASKS) - 1), + ), + ) + def test_key_order_property_sort_index(key: str, order: int) -> None: + """Test sorting index.""" + assert get_property_sort_index(key) == order + + @pytest.mark.parametrize( + ("prop1", "prop2", "result"), + ( + pytest.param("name", "block", -1), + pytest.param("block", "name", 1), + pytest.param("block", "block", 0), + ), + ) + def test_key_order_property_sortfunc(prop1: str, prop2: str, result: int) -> None: + """Test sorting function.""" + assert task_property_sorter(prop1, prop2) == result diff --git a/src/ansiblelint/rules/latest.md b/src/ansiblelint/rules/latest.md new file mode 100644 index 0000000..1b20432 --- /dev/null +++ b/src/ansiblelint/rules/latest.md @@ -0,0 +1,43 @@ +# latest + +The `latest` rule checks that module arguments like those used for source +control checkout do not have arguments that might generate different results +based on context. + +This more generic rule replaced two older rules named `git-latest` and +`hg-latest`. + +We are aware that there are genuine cases where getting the tip of the main +branch is not accidental. For these cases, just add a comment such as +`# noqa: latest` to the same line to prevent it from triggering. + +## Possible errors messages: + +- `latest[git]` +- `latest[hg]` + +## Problematic code + +```yaml +--- +- name: Example for `latest` rule + hosts: localhost + tasks: + - name: Risky use of git module + ansible.builtin.git: + repo: "https://github.com/ansible/ansible-lint" + version: HEAD # <-- HEAD value is triggering the rule +``` + +## Correct code + +```yaml +--- +- name: Example for `latest` rule + hosts: localhost + tasks: + - name: Safe use of git module + ansible.builtin.git: + repo: "https://github.com/ansible/ansible-lint" + version: abcd1234... 
# <-- that is safe +``` diff --git a/src/ansiblelint/rules/latest.py b/src/ansiblelint/rules/latest.py new file mode 100644 index 0000000..a21fdf5 --- /dev/null +++ b/src/ansiblelint/rules/latest.py @@ -0,0 +1,39 @@ +"""Implementation of latest rule.""" +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from ansiblelint.errors import MatchError +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class LatestRule(AnsibleLintRule): + """Result of the command may vary on subsequent runs.""" + + id = "latest" + description = ( + "All version control checkouts must point to " + "an explicit commit or tag, not just ``latest``" + ) + severity = "MEDIUM" + tags = ["idempotency"] + version_added = "v6.5.2" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str | MatchError: + """Check if module args are safe.""" + if ( + task["action"]["__ansible_module__"] == "git" + and task["action"].get("version", "HEAD") == "HEAD" + ): + return self.create_matcherror(tag="latest[git]", filename=file) + if ( + task["action"]["__ansible_module__"] == "hg" + and task["action"].get("revision", "default") == "default" + ): + return self.create_matcherror(tag="latest[hg]", filename=file) + return False diff --git a/src/ansiblelint/rules/literal_compare.md b/src/ansiblelint/rules/literal_compare.md new file mode 100644 index 0000000..606402c --- /dev/null +++ b/src/ansiblelint/rules/literal_compare.md @@ -0,0 +1,29 @@ +# literal-compare + +This rule checks for literal comparison with the `when` clause. +Literal comparison, like `when: var == True`, is unnecessarily complex. +Use `when: var` to keep your playbooks simple. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Print environment variable to stdout + ansible.builtin.command: echo $MY_ENV_VAR + when: ansible_os_family == True # <- Adds complexity to your playbook. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Print environment variable to stdout + ansible.builtin.command: echo $MY_ENV_VAR + when: ansible_os_family # <- Keeps your playbook simple. 
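      # Illustrative addition (not part of the original example): the same
      # simplification applies to negated checks, for example prefer
      # `when: not my_var` over `when: my_var == False`.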
+``` diff --git a/src/ansiblelint/rules/literal_compare.py b/src/ansiblelint/rules/literal_compare.py new file mode 100644 index 0000000..e81be15 --- /dev/null +++ b/src/ansiblelint/rules/literal_compare.py @@ -0,0 +1,48 @@ +"""Implementation of the literal-compare rule.""" +# Copyright (c) 2016, Will Thames and contributors +# Copyright (c) 2018-2021, Ansible Project + +from __future__ import annotations + +import re +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.yaml_utils import nested_items_path + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class ComparisonToLiteralBoolRule(AnsibleLintRule): + """Don't compare to literal True/False.""" + + id = "literal-compare" + description = ( + "Use ``when: var`` rather than ``when: var == True`` " + "(or conversely ``when: not var``)" + ) + severity = "HIGH" + tags = ["idiom"] + version_added = "v4.0.0" + + literal_bool_compare = re.compile("[=!]= ?(True|true|False|false)") + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + for k, v, _ in nested_items_path(task): + if k == "when": + if isinstance(v, str): + if self.literal_bool_compare.search(v): + return True + elif isinstance(v, bool): + pass + else: + for item in v: + if isinstance(item, str) and self.literal_bool_compare.search( + item + ): + return True + + return False diff --git a/src/ansiblelint/rules/loop_var_prefix.md b/src/ansiblelint/rules/loop_var_prefix.md new file mode 100644 index 0000000..33adbd7 --- /dev/null +++ b/src/ansiblelint/rules/loop_var_prefix.md @@ -0,0 +1,78 @@ +# loop-var-prefix + +This rule avoids conflicts with nested looping tasks by configuring a variable +prefix with `loop_var`. Ansible sets `item` as the loop variable. You can use +`loop_var` to specify a prefix for loop variables and ensure they are unique to +each task. + +This rule can produce the following messages: + +- `loop-var-prefix[missing]` - Replace any unsafe implicit `item` loop variable + by adding `loop_var: <loop_var_prefix>...`. +- `loop-var-prefix[wrong]` - Ensure loop variables start with + `<loop_var_prefix>`. + +This rule originates from the [Naming parameters section of Ansible Best +Practices guide][cop314]. + +## Settings + +You can change the behavior of this rule by overriding its default regular +expression used to check loop variable naming. Keep in mind that the `{role}` +part is replaced with the inferred role name when applicable. + +```yaml +# .ansible-lint +loop_var_prefix: "^(__|{role}_)" +``` + +This is an opt-in rule. You must enable it in your Ansible-lint configuration as +follows: + +```yaml +enable_list: + - loop-var-prefix +``` + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Does not set a prefix for loop variables. + ansible.builtin.debug: + var: item + loop: + - foo + - bar # <- These items do not have a unique prefix. + - name: Sets a prefix that is not unique. + ansible.builtin.debug: + var: zz_item + loop: + - foo + - bar + loop_control: + loop_var: zz_item # <- This prefix is not unique. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Sets a unique prefix for loop variables. + ansible.builtin.debug: + var: zz_item + loop: + - foo + - bar + loop_control: + loop_var: my_prefix # <- Specifies a unique prefix for loop variables. 
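      # Illustrative note (editor's addition): with the default setting
      # `loop_var_prefix: "^(__|{role}_)"`, a role named `myrole` would accept
      # loop variable names such as `myrole_item` or `__item`.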
+``` + +[cop314]: + https://redhat-cop.github.io/automation-good-practices/#_naming_parameters diff --git a/src/ansiblelint/rules/loop_var_prefix.py b/src/ansiblelint/rules/loop_var_prefix.py new file mode 100644 index 0000000..cc909a3 --- /dev/null +++ b/src/ansiblelint/rules/loop_var_prefix.py @@ -0,0 +1,100 @@ +"""Optional Ansible-lint rule to enforce use of prefix on role loop vars.""" +from __future__ import annotations + +import re +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.config import LOOP_VAR_PREFIX, options +from ansiblelint.errors import MatchError +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.text import toidentifier + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class RoleLoopVarPrefix(AnsibleLintRule): + """Role loop_var should use configured prefix.""" + + id = "loop-var-prefix" + link = ( + "https://docs.ansible.com/ansible/latest/playbook_guide/" + "playbooks_loops.html#defining-inner-and-outer-variable-names-with-loop-var" + ) + description = """\ +Looping inside roles has the risk of clashing with loops from user-playbooks.\ +""" + + tags = ["idiom"] + prefix = re.compile("") + severity = "MEDIUM" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + """Return matches for a task.""" + if not file or not file.role or not options.loop_var_prefix: + return [] + + self.prefix = re.compile( + options.loop_var_prefix.format(role=toidentifier(file.role)) + ) + has_loop = "loop" in task + for key in task.keys(): + if key.startswith("with_"): + has_loop = True + + if has_loop: + loop_control = task.get("loop_control", {}) + loop_var = loop_control.get("loop_var", "") + + if loop_var: + if not self.prefix.match(loop_var): + return [ + self.create_matcherror( + message=f"Loop variable name does not match /{options.loop_var_prefix}/ regex, where role={toidentifier(file.role)}.", + filename=file, + tag="loop-var-prefix[wrong]", + ) + ] + else: + return [ + self.create_matcherror( + message=f"Replace unsafe implicit `item` loop variable by adding a `loop_var` that is matching /{options.loop_var_prefix}/ regex.", + filename=file, + tag="loop-var-prefix[missing]", + ) + ] + + return [] + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("test_file", "failures"), + ( + pytest.param( + "examples/playbooks/roles/loop_var_prefix/tasks/pass.yml", 0, id="pass" + ), + pytest.param( + "examples/playbooks/roles/loop_var_prefix/tasks/fail.yml", 5, id="fail" + ), + ), + ) + def test_loop_var_prefix( + default_rules_collection: RulesCollection, test_file: str, failures: int + ) -> None: + """Test rule matches.""" + # Enable checking of loop variable prefixes in roles + options.loop_var_prefix = LOOP_VAR_PREFIX + results = Runner(test_file, rules=default_rules_collection).run() + for result in results: + assert result.rule.id == RoleLoopVarPrefix().id + assert len(results) == failures diff --git a/src/ansiblelint/rules/meta_incorrect.md b/src/ansiblelint/rules/meta_incorrect.md new file mode 100644 index 0000000..b1e8793 --- /dev/null +++ b/src/ansiblelint/rules/meta_incorrect.md @@ -0,0 +1,32 @@ +# meta-incorrect + +This rule checks role metadata for fields with undefined or default values. 
+Always set appropriate values for the following metadata fields in the `meta/main.yml` file: + +- `author` +- `description` +- `company` +- `license` + +## Problematic Code + +```yaml +--- +# Metadata fields for the role contain default values. +galaxy_info: + author: your name + description: your role description + company: your company (optional) + license: license (GPL-2.0-or-later, MIT, etc) +``` + +## Correct Code + +```yaml +--- +galaxy_info: + author: Leroy Jenkins + description: This role will set you free. + company: Red Hat + license: Apache +``` diff --git a/src/ansiblelint/rules/meta_incorrect.py b/src/ansiblelint/rules/meta_incorrect.py new file mode 100644 index 0000000..160b8ec --- /dev/null +++ b/src/ansiblelint/rules/meta_incorrect.py @@ -0,0 +1,56 @@ +"""Implementation of meta-incorrect rule.""" +# Copyright (c) 2018, Ansible Project +from __future__ import annotations + +from typing import TYPE_CHECKING + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from typing import Any + + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + + +class MetaChangeFromDefaultRule(AnsibleLintRule): + """meta/main.yml default values should be changed.""" + + id = "meta-incorrect" + field_defaults = [ + ("author", "your name"), + ("description", "your description"), + ("company", "your company (optional)"), + ("license", "license (GPLv2, CC-BY, etc)"), + ("license", "license (GPL-2.0-or-later, MIT, etc)"), + ] + values = ", ".join(sorted({f[0] for f in field_defaults})) + description = ( + f"You should set appropriate values in meta/main.yml for these fields: {values}" + ) + severity = "HIGH" + tags = ["metadata"] + version_added = "v4.0.0" + + def matchyaml(self, file: Lintable) -> list[MatchError]: + if file.kind != "meta" or not file.data: + return [] + + galaxy_info = file.data.get("galaxy_info", None) + if not galaxy_info: + return [] + + results = [] + for field, default in self.field_defaults: + value = galaxy_info.get(field, None) + if value and value == default: + results.append( + self.create_matcherror( + filename=file, + linenumber=file.data[LINE_NUMBER_KEY], + message=f"Should change default metadata: {field}", + ) + ) + + return results diff --git a/src/ansiblelint/rules/meta_no_info.md b/src/ansiblelint/rules/meta_no_info.md new file mode 100644 index 0000000..7617cd2 --- /dev/null +++ b/src/ansiblelint/rules/meta_no_info.md @@ -0,0 +1,28 @@ +# meta-no-info + +This rule checks role metadata for missing information. +Always set appropriate values for the following metadata fields in the `meta/main.yml` file, under `galaxy_info` key: + +- `platforms` +- `min_ansible_version` + +## Problematic Code + +```yaml +--- +# The metadata fields for minimum Ansible version and supported platforms are not set. 
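# Illustrative note (editor's addition): besides `platforms` and
# `min_ansible_version`, the rule also reports missing `author`, `description`
# and `license` entries under `galaxy_info`.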
+galaxy_info: + min_ansible_version: +``` + +## Correct Code + +```yaml +--- +galaxy_info: + min_ansible_version: "2.8" + platforms: + - name: Fedora + versions: + - all +``` diff --git a/src/ansiblelint/rules/meta_no_info.py b/src/ansiblelint/rules/meta_no_info.py new file mode 100644 index 0000000..0e3c046 --- /dev/null +++ b/src/ansiblelint/rules/meta_no_info.py @@ -0,0 +1,83 @@ +"""Implementation of meta-no-info rule.""" +# Copyright (c) 2016, Will Thames and contributors +# Copyright (c) 2018, Ansible Project +from __future__ import annotations + +from typing import TYPE_CHECKING, Generator + +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from typing import Any, Tuple + + +META_STR_INFO = ("author", "description") +META_INFO = tuple( + list(META_STR_INFO) + + [ + "license", + "min_ansible_version", + "platforms", + ] +) + + +def _platform_info_errors_itr( + platforms: list[dict[str, str]], +) -> Generator[str, None, None]: + if not isinstance(platforms, list): + yield "Platforms should be a list of dictionaries" + return + + for platform in platforms: + if not isinstance(platform, dict): + yield "Platforms should be a list of dictionaries" + elif "name" not in platform: + yield "Platform should contain name" + + +def _galaxy_info_errors_itr( + galaxy_info: dict[str, Any], + info_list: tuple[str, ...] = META_INFO, + str_info_list: tuple[str, ...] = META_STR_INFO, +) -> Generator[str, None, None]: + for info in info_list: + g_info = galaxy_info.get(info, False) + if g_info: + if info in str_info_list and not isinstance(g_info, str): + yield f"{info} should be a string" + elif info == "platforms": + yield from _platform_info_errors_itr(g_info) + else: + yield f"Role info should contain {info}" + + +class MetaMainHasInfoRule(AnsibleLintRule): + """meta/main.yml should contain relevant info.""" + + id = "meta-no-info" + str_info = META_STR_INFO + info = META_INFO + description = f"meta/main.yml should contain: {', '.join(info)}" + severity = "HIGH" + tags = ["metadata"] + version_added = "v4.0.0" + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + if file.kind != "meta": + return [] + + # since Ansible 2.10 we can add a meta/requirements.yml but + # we only want to match on meta/main.yml + if file.path.name != "main.yml": + return [] + + galaxy_info = data.get("galaxy_info", False) + if galaxy_info: + return [ + self.create_matcherror(message=err, filename=file) + for err in _galaxy_info_errors_itr(galaxy_info) + ] + return [self.create_matcherror(message="No 'galaxy_info' found", filename=file)] diff --git a/src/ansiblelint/rules/meta_no_tags.md b/src/ansiblelint/rules/meta_no_tags.md new file mode 100644 index 0000000..9518549 --- /dev/null +++ b/src/ansiblelint/rules/meta_no_tags.md @@ -0,0 +1,22 @@ +# meta-no-tags + +This rule checks role metadata for tags with special characters. +Always use lowercase numbers and letters for tags in the `meta/main.yml` file. + +## Problematic Code + +```yaml +--- +# Metadata tags contain upper-case letters and special characters. +galaxy_info: + galaxy_tags: [MyTag#1, MyTag&^-] +``` + +## Correct Code + +```yaml +--- +# Metadata tags contain only lowercase letters and numbers. 
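# Illustrative note (editor's addition): each tag is validated against the
# regular expression ^[a-z0-9]+$, so hyphens, underscores and spaces are
# rejected as well.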
+galaxy_info: + galaxy_tags: [mytag1, mytag2] +``` diff --git a/src/ansiblelint/rules/meta_no_tags.py b/src/ansiblelint/rules/meta_no_tags.py new file mode 100644 index 0000000..4fab461 --- /dev/null +++ b/src/ansiblelint/rules/meta_no_tags.py @@ -0,0 +1,163 @@ +"""Implementation of meta-no-tags rule.""" +from __future__ import annotations + +import re +import sys +from typing import TYPE_CHECKING + +from ansiblelint.rules import AnsibleLintRule + +# Copyright (c) 2018, Ansible Project + + +if TYPE_CHECKING: + from typing import Any + + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + + +class MetaTagValidRule(AnsibleLintRule): + """Tags must contain lowercase letters and digits only.""" + + id = "meta-no-tags" + description = ( + "Tags must contain lowercase letters and digits only, " + "and ``galaxy_tags`` is expected to be a list" + ) + severity = "HIGH" + tags = ["metadata"] + version_added = "v4.0.0" + + TAG_REGEXP = re.compile("^[a-z0-9]+$") + + def matchyaml(self, file: Lintable) -> list[MatchError]: + """Find violations inside meta files.""" + if file.kind != "meta" or not file.data: + return [] + + galaxy_info = file.data.get("galaxy_info", None) + if not galaxy_info: + return [] + + tags = [] + results = [] + + if "galaxy_tags" in galaxy_info: + if isinstance(galaxy_info["galaxy_tags"], list): + tags += galaxy_info["galaxy_tags"] + else: + results.append( + self.create_matcherror( + "Expected 'galaxy_tags' to be a list", filename=file + ) + ) + + if "categories" in galaxy_info: + results.append( + self.create_matcherror( + "Use 'galaxy_tags' rather than 'categories'", filename=file + ) + ) + if isinstance(galaxy_info["categories"], list): + tags += galaxy_info["categories"] + else: + results.append( + self.create_matcherror( + "Expected 'categories' to be a list", filename=file + ) + ) + + for tag in tags: + msg = self.shortdesc + if not isinstance(tag, str): + results.append( + self.create_matcherror( + f"Tags must be strings: '{tag}'", filename=file + ) + ) + continue + if not re.match(self.TAG_REGEXP, tag): + results.append( + self.create_matcherror( + message=f"{msg}, invalid: '{tag}'", filename=file + ) + ) + + return results + + +META_TAG_VALID = """ +galaxy_info: + galaxy_tags: ['database', 'my s q l', 'MYTAG'] + categories: 'my_category_not_in_a_list' +""" + +META_TAG_NO_GALAXY_INFO = """ +galaxy_tags: ['database', 'my s q l', 'MYTAG'] +""" + +META_TAG_NO_LIST = """ +galaxy_info: + galaxy_tags: 'database' +""" + +META_CATEGORIES_AS_LIST = """ +galaxy_info: + galaxy_tags: ['database', 'my s q l', 'MYTAG'] + categories: ['networking', 'posix'] +""" + +META_TAGS_NOT_A_STRING = """ +galaxy_info: + galaxy_tags: [False, 'database', 'my s q l', 'MYTAG'] + categories: 'networking' +""" + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + @pytest.mark.parametrize( + "rule_runner", (MetaTagValidRule,), indirect=["rule_runner"] + ) + def test_valid_tag_rule(rule_runner: Any) -> None: + """Test rule matches.""" + results = rule_runner.run_role_meta_main(META_TAG_VALID) + assert "Use 'galaxy_tags' rather than 'categories'" in str(results), results + assert "Expected 'categories' to be a list" in str(results) + assert "invalid: 'my s q l'" in str(results) + assert "invalid: 'MYTAG'" in str(results) + + @pytest.mark.parametrize( + "rule_runner", (MetaTagValidRule,), indirect=["rule_runner"] + ) + def test_no_galaxy_info(rule_runner: Any) -> None: + """Test rule matches.""" 
+ results = rule_runner.run_role_meta_main(META_TAG_NO_GALAXY_INFO) + assert results == [] + + @pytest.mark.parametrize( + "rule_runner", (MetaTagValidRule,), indirect=["rule_runner"] + ) + def test_no_galaxy_tags_list(rule_runner: Any) -> None: + """Test rule matches.""" + results = rule_runner.run_role_meta_main(META_TAG_NO_LIST) + assert "Expected 'galaxy_tags' to be a list" in str(results) + + @pytest.mark.parametrize( + "rule_runner", (MetaTagValidRule,), indirect=["rule_runner"] + ) + def test_galaxy_categories_as_list(rule_runner: Any) -> None: + """Test rule matches.""" + results = rule_runner.run_role_meta_main(META_CATEGORIES_AS_LIST) + assert "Use 'galaxy_tags' rather than 'categories'" in str(results), results + assert "Expected 'categories' to be a list" not in str(results) + + @pytest.mark.parametrize( + "rule_runner", (MetaTagValidRule,), indirect=["rule_runner"] + ) + def test_tags_not_a_string(rule_runner: Any) -> None: + """Test rule matches.""" + results = rule_runner.run_role_meta_main(META_TAGS_NOT_A_STRING) + assert "Tags must be strings" in str(results) diff --git a/src/ansiblelint/rules/meta_runtime.md b/src/ansiblelint/rules/meta_runtime.md new file mode 100644 index 0000000..5526912 --- /dev/null +++ b/src/ansiblelint/rules/meta_runtime.md @@ -0,0 +1,45 @@ +# meta-runtime + +This rule checks the meta/runtime.yml `requires_ansible` key against the list of currently supported versions of ansible-core. + +This rule can produce messages such: + +- `requires_ansible` key must be set to a supported version. + +Currently supported versions of ansible-core are: + +- `2.9.10` +- `2.11.x` +- `2.12.x` +- `2.13.x` +- `2.14.x` +- `2.15.x` + +This rule can produce messages such as: + +- `meta-runtime[unsupported-version]` - `requires_ansible` key must contain a supported version, shown in the list above. +- `meta-runtime[invalid-version]` - `requires_ansible` key must be a valid version identifier. + + +## Problematic code + +```yaml +# runtime.yml +--- +requires_ansible: ">=2.9" +``` + + +```yaml +# runtime.yml +--- +requires_ansible: "2.9" +``` + +## Correct code + +```yaml +# runtime.yml +--- +requires_ansible: ">=2.9.10" +``` diff --git a/src/ansiblelint/rules/meta_runtime.py b/src/ansiblelint/rules/meta_runtime.py new file mode 100644 index 0000000..2e2fdd5 --- /dev/null +++ b/src/ansiblelint/rules/meta_runtime.py @@ -0,0 +1,125 @@ +"""Implementation of meta-runtime rule.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING + +from packaging.specifiers import SpecifierSet + +from ansiblelint.rules import AnsibleLintRule + +# Copyright (c) 2018, Ansible Project + + +if TYPE_CHECKING: + from typing import Any + + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + + +class CheckRequiresAnsibleVersion(AnsibleLintRule): + """Required ansible version in meta/runtime.yml must be a supported version.""" + + id = "meta-runtime" + description = ( + "The ``requires_ansible`` key in runtime.yml must specify " + "a supported platform version of ansible-core and be a valid version." + ) + severity = "VERY_HIGH" + tags = ["metadata"] + version_added = "v6.11.0 (last update)" + + # Refer to https://access.redhat.com/support/policy/updates/ansible-automation-platform + # Also add devel to this list + supported_ansible = ["2.9.10", "2.11.", "2.12.", "2.13.", "2.14.", "2.15."] + + def matchyaml(self, file: Lintable) -> list[MatchError]: + """Find violations inside meta files. 
+ + :param file: Input lintable file that is a match for `meta-runtime` + :returns: List of errors matched to the input file + """ + results = [] + + if file.kind != "meta-runtime": + return [] + + version_required = file.data.get("requires_ansible", None) + + if version_required: + if not any( + version in version_required for version in self.supported_ansible + ): + results.append( + self.create_matcherror( + message="requires_ansible key must be set to a supported version.", + tag="meta-runtime[unsupported-version]", + filename=file, + ) + ) + + try: + SpecifierSet(version_required) + except ValueError: + results.append( + self.create_matcherror( + message="'requires_ansible' is not a valid requirement specification", + tag="meta-runtime[invalid-version]", + filename=file, + ) + ) + return results + + return [] + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("test_file", "failures", "tags"), + ( + pytest.param( + "examples/meta_runtime_version_checks/pass/meta/runtime.yml", + 0, + "meta-runtime[unsupported-version]", + id="pass", + ), + pytest.param( + "examples/meta_runtime_version_checks/fail_0/meta/runtime.yml", + 1, + "meta-runtime[unsupported-version]", + id="fail0", + ), + pytest.param( + "examples/meta_runtime_version_checks/fail_1/meta/runtime.yml", + 1, + "meta-runtime[unsupported-version]", + id="fail1", + ), + pytest.param( + "examples/meta_runtime_version_checks/fail_2/meta/runtime.yml", + 1, + "meta-runtime[invalid-version]", + id="fail2", + ), + ), + ) + def test_meta_supported_version( + default_rules_collection: RulesCollection, + test_file: str, + failures: int, + tags: str, + ) -> None: + """Test rule matches.""" + default_rules_collection.register(CheckRequiresAnsibleVersion()) + results = Runner(test_file, rules=default_rules_collection).run() + for result in results: + assert result.rule.id == CheckRequiresAnsibleVersion().id + assert result.tag == tags + assert len(results) == failures diff --git a/src/ansiblelint/rules/meta_video_links.md b/src/ansiblelint/rules/meta_video_links.md new file mode 100644 index 0000000..c3f051b --- /dev/null +++ b/src/ansiblelint/rules/meta_video_links.md @@ -0,0 +1,36 @@ +# meta-video-links + +This rule checks formatting for video links in metadata. Always use dictionaries +for items in the `meta/main.yml` file. + +Items in the `video_links` section must be in a dictionary and use the following +keys: + +- `url` +- `title` + +The value of the `url` key must be a shared link from YouTube, Vimeo, or Google +Drive. + +## Problematic Code + +```yaml +--- +galaxy_info: + video_links: + - https://www.youtube.com/watch?v=aWmRepTSFKs&feature=youtu.be # <- Does not use the url key. + - my_bad_key: https://www.youtube.com/watch?v=aWmRepTSFKs&feature=youtu.be # <- Uses an unsupported key. + title: Incorrect key. + - url: www.acme.com/vid # <- Uses an unsupported url format. + title: Incorrect url format. +``` + +## Correct Code + +```yaml +--- +galaxy_info: + video_links: + - url: https://www.youtube.com/watch?v=aWmRepTSFKs&feature=youtu.be # <- Uses a supported shared link with the url key. + title: Correctly formatted video link. 
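    # Illustrative note (editor's addition): accepted URLs follow the rule's
    # regular expressions, for example https://youtu.be/<id>,
    # https://vimeo.com/<id> or https://drive.google.com/.../file/d/<id>/... shared links.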
+``` diff --git a/src/ansiblelint/rules/meta_video_links.py b/src/ansiblelint/rules/meta_video_links.py new file mode 100644 index 0000000..402844a --- /dev/null +++ b/src/ansiblelint/rules/meta_video_links.py @@ -0,0 +1,80 @@ +"""Implementation of meta-video-links rule.""" +# Copyright (c) 2018, Ansible Project +from __future__ import annotations + +import re +from typing import TYPE_CHECKING + +from ansiblelint.constants import FILENAME_KEY, LINE_NUMBER_KEY +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + + +class MetaVideoLinksRule(AnsibleLintRule): + """meta/main.yml video_links should be formatted correctly.""" + + id = "meta-video-links" + description = ( + "Items in ``video_links`` in meta/main.yml should be " + "dictionaries, and contain only keys ``url`` and ``title``, " + "and have a shared link from a supported provider" + ) + severity = "LOW" + tags = ["metadata"] + version_added = "v4.0.0" + + VIDEO_REGEXP = { + "google": re.compile(r"https://drive\.google\.com.*file/d/([0-9A-Za-z-_]+)/.*"), + "vimeo": re.compile(r"https://vimeo\.com/([0-9]+)"), + "youtube": re.compile(r"https://youtu\.be/([0-9A-Za-z-_]+)"), + } + + def matchyaml(self, file: Lintable) -> list[MatchError]: + if file.kind != "meta" or not file.data: + return [] + + galaxy_info = file.data.get("galaxy_info", None) + if not galaxy_info: + return [] + + video_links = galaxy_info.get("video_links", None) + if not video_links: + return [] + + results = [] + + for video in video_links: + if not isinstance(video, dict): + results.append( + self.create_matcherror( + "Expected item in 'video_links' to be a dictionary", + filename=file, + ) + ) + continue + + if set(video) != {"url", "title", FILENAME_KEY, LINE_NUMBER_KEY}: + results.append( + self.create_matcherror( + "Expected item in 'video_links' to contain " + "only keys 'url' and 'title'", + filename=file, + ) + ) + continue + + for _, expr in self.VIDEO_REGEXP.items(): + if expr.match(video["url"]): + break + else: + msg = ( + f"URL format '{video['url']}' is not recognized. " + "Expected it be a shared link from Vimeo, YouTube, " + "or Google Drive." + ) + results.append(self.create_matcherror(msg, filename=file)) + + return results diff --git a/src/ansiblelint/rules/name.md b/src/ansiblelint/rules/name.md new file mode 100644 index 0000000..9df4213 --- /dev/null +++ b/src/ansiblelint/rules/name.md @@ -0,0 +1,61 @@ +# name + +This rule identifies several problems related to the naming of tasks and plays. +This is important because these names are the primary way to **identify** and +**document** executed operations on the console, logs or web interface. + +This rule can produce messages as: + +- `name[casing]` - All names should start with an uppercase letter for languages + that support it. +- `name[missing]` - All tasks should be named. +- `name[play]` - All plays should be named. +- `name[prefix]` - Prefix task names in sub-tasks files. (opt-in) +- `name[template]` - Jinja templates should only be at the end of 'name'. This + helps with the identification of tasks inside the source code when they fail. + The use of templating inside `name` keys is discouraged as there are multiple + cases where the rendering of the name template is not possible. + +If you want to ignore some of the messages above, you can add any of them to the +`skip_list`. + +## name[prefix] + +This rule applies only to included task files that are not named `main.yml`. 
It +suggests adding the stem of the file as a prefix to the task name. + +For example, if you have a task named `Restart server` inside a file named +`tasks/deploy.yml`, this rule suggests renaming it to `deploy | Restart server`, +so it would be easier to identify where it comes from. + +For the moment, this sub-rule is just an **opt-in**, so you need to add it to +your `enable_list` to activate it. + +!!! note + + This rule was designed by [Red Hat Community of Practice](https://redhat-cop.github.io/automation-good-practices/#_prefix_task_names_in_sub_tasks_files_of_roles). The reasoning behind it being + that in a complex roles or playbooks with multiple (sub-)tasks file, it becomes + difficult to understand which task belongs to which file. Adding a prefix, in + combination with the role’s name automatically added by Ansible, makes it a + lot easier to follow and troubleshoot a role play. + +## Problematic code + +```yaml +--- +- hosts: localhost # <-- playbook name[play] + tasks: + - name: create placefolder file # <-- name[casing] due lack of capital letter + ansible.builtin.command: touch /tmp/.placeholder +``` + +## Correct code + +```yaml +--- +- name: Play for creating placeholder + hosts: localhost + tasks: + - name: Create placeholder file + ansible.builtin.command: touch /tmp/.placeholder +``` diff --git a/src/ansiblelint/rules/name.py b/src/ansiblelint/rules/name.py new file mode 100644 index 0000000..671829e --- /dev/null +++ b/src/ansiblelint/rules/name.py @@ -0,0 +1,214 @@ +"""Implementation of NameRule.""" +from __future__ import annotations + +import re +import sys +from copy import deepcopy +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.errors import MatchError +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable # noqa: F811 + + +class NameRule(AnsibleLintRule): + """Rule for checking task and play names.""" + + id = "name" + description = ( + "All tasks and plays should have a distinct name for readability " + "and for ``--start-at-task`` to work" + ) + severity = "MEDIUM" + tags = ["idiom"] + version_added = "v6.9.1 (last update)" + _re_templated_inside = re.compile(r".*\{\{.*\}\}.*\w.*$") + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + """Return matches found for a specific play (entry in playbook).""" + results = [] + if file.kind != "playbook": + return [] + if "name" not in data: + return [ + self.create_matcherror( + message="All plays should be named.", + linenumber=data[LINE_NUMBER_KEY], + tag="name[play]", + filename=file, + ) + ] + results.extend( + self._check_name( + data["name"], lintable=file, linenumber=data[LINE_NUMBER_KEY] + ) + ) + return results + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + results = [] + name = task.get("name") + if not name: + results.append( + self.create_matcherror( + message="All tasks should be named.", + linenumber=task[LINE_NUMBER_KEY], + tag="name[missing]", + filename=file, + ) + ) + else: + results.extend( + self._prefix_check( + name, lintable=file, linenumber=task[LINE_NUMBER_KEY] + ) + ) + return results + + def _prefix_check( + self, name: str, lintable: Lintable | None, linenumber: int + ) -> list[MatchError]: + results: list[MatchError] = [] + effective_name = name + if lintable is None: + return [] + + if not results: + results.extend( + self._check_name( + effective_name, lintable=lintable, 
linenumber=linenumber + ) + ) + return results + + def _check_name( + self, name: str, lintable: Lintable | None, linenumber: int + ) -> list[MatchError]: + # This rules applies only to languages that do have uppercase and + # lowercase letter, so we ignore anything else. On Unicode isupper() + # is not necessarily the opposite of islower() + results = [] + # stage one check prefix + effective_name = name + if self._collection and lintable: + prefix = self._collection.options.task_name_prefix.format( + stem=lintable.path.stem + ) + if lintable.kind == "tasks" and lintable.path.stem != "main": + if not name.startswith(prefix): + # For the moment in order to raise errors this rule needs to be + # enabled manually. Still, we do allow use of prefixes even without + # having to enable the rule. + if "name[prefix]" in self._collection.options.enable_list: + results.append( + self.create_matcherror( + message=f"Task name should start with '{prefix}'.", + linenumber=linenumber, + tag="name[prefix]", + filename=lintable, + ) + ) + return results + else: + effective_name = name[len(prefix) :] + + if ( + effective_name[0].isalpha() + and effective_name[0].islower() + and not effective_name[0].isupper() + ): + results.append( + self.create_matcherror( + message="All names should start with an uppercase letter.", + linenumber=linenumber, + tag="name[casing]", + filename=lintable, + ) + ) + if self._re_templated_inside.match(name): + results.append( + self.create_matcherror( + message="Jinja templates should only be at the end of 'name'", + linenumber=linenumber, + tag="name[template]", + filename=lintable, + ) + ) + return results + + +if "pytest" in sys.modules: # noqa: C901 + from ansiblelint.config import options + from ansiblelint.file_utils import Lintable # noqa: F811 + from ansiblelint.rules import RulesCollection + from ansiblelint.runner import Runner + + def test_file_positive() -> None: + """Positive test for unnamed-task.""" + collection = RulesCollection() + collection.register(NameRule()) + success = "examples/playbooks/rule-name-missing-pass.yml" + good_runner = Runner(success, rules=collection) + assert [] == good_runner.run() + + def test_file_negative() -> None: + """Negative test for unnamed-task.""" + collection = RulesCollection() + collection.register(NameRule()) + failure = "examples/playbooks/rule-name-missing-fail.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 5 + + def test_name_prefix_negative() -> None: + """Negative test for unnamed-task.""" + custom_options = deepcopy(options) + custom_options.enable_list = ["name[prefix]"] + collection = RulesCollection(options=custom_options) + collection.register(NameRule()) + failure = Lintable( + "examples/playbooks/tasks/rule-name-prefix-fail.yml", kind="tasks" + ) + bad_runner = Runner(failure, rules=collection) + results = bad_runner.run() + assert len(results) == 3 + # , "\n".join(results) + assert results[0].tag == "name[casing]" + assert results[1].tag == "name[prefix]" + assert results[2].tag == "name[prefix]" + + def test_rule_name_lowercase() -> None: + """Negative test for a task that starts with lowercase.""" + collection = RulesCollection() + collection.register(NameRule()) + failure = "examples/playbooks/rule-name-casing.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 1 + assert errs[0].tag == "name[casing]" + assert errs[0].rule.id == "name" + + def test_name_play() -> None: + """Positive test for name[play].""" + 
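        # Editor's note: the example playbook omits the play-level `name`,
        # so exactly one name[play] match is expected below.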
collection = RulesCollection() + collection.register(NameRule()) + success = "examples/playbooks/rule-name-play-fail.yml" + errs = Runner(success, rules=collection).run() + assert len(errs) == 1 + assert errs[0].tag == "name[play]" + assert errs[0].rule.id == "name" + + def test_name_template() -> None: + """Negative test for name[templated].""" + collection = RulesCollection() + collection.register(NameRule()) + failure = "examples/playbooks/rule-name-templated-fail.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 1 + assert errs[0].tag == "name[template]" diff --git a/src/ansiblelint/rules/no_changed_when.md b/src/ansiblelint/rules/no_changed_when.md new file mode 100644 index 0000000..2e0cb82 --- /dev/null +++ b/src/ansiblelint/rules/no_changed_when.md @@ -0,0 +1,46 @@ +# no-changed-when + +This rule checks that tasks return changes to results or conditions. Unless +tasks only read information, you should ensure that they return changes in the +following ways: + +- Register results or conditions and use the `changed_when` clause. +- Use the `creates` or `removes` argument. + +You should always use the `changed_when` clause on tasks that do not naturally +detect if a change has occurred or not. Some of the most common examples are +[shell] and [command] modules, which run arbitrary commands. + +One very common workaround is to use a boolean value like `changed_when: false` +if the task never changes anything or `changed_when: true` if it always +changes something, but you can also use any expressions, including ones that +use the registered result of a task, like in our example below. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Does not handle any output or return codes + ansible.builtin.command: cat {{ my_file | quote }} # <- Does not handle the command output. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Handle shell output with return code + ansible.builtin.command: cat {{ my_file | quote }} + register: my_output # <- Registers the command output. + changed_when: my_output.rc != 0 # <- Uses the return code to define when the task has changed. +``` + +[shell]: + https://docs.ansible.com/ansible/latest/collections/ansible/builtin/shell_module.html +[command]: + https://docs.ansible.com/ansible/latest/collections/ansible/builtin/command_module.html diff --git a/src/ansiblelint/rules/no_changed_when.py b/src/ansiblelint/rules/no_changed_when.py new file mode 100644 index 0000000..488b0c2 --- /dev/null +++ b/src/ansiblelint/rules/no_changed_when.py @@ -0,0 +1,94 @@ +"""Implementation of the no-changed-when rule.""" +# Copyright (c) 2016 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.errors import MatchError +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class CommandHasChangesCheckRule(AnsibleLintRule): + """Commands should not change things if nothing needs doing.""" + + id = "no-changed-when" + severity = "HIGH" + tags = ["command-shell", "idempotency"] + version_added = "historic" + + _commands = [ + "ansible.builtin.command", + "ansible.builtin.shell", + "ansible.builtin.raw", + "ansible.legacy.command", + "ansible.legacy.shell", + "ansible.legacy.raw", + "command", + "shell", + "raw", + ] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + result = [] + # tasks in a block are "meta" type + if task["__ansible_action_type__"] in ["task", "meta"]: + if task["action"]["__ansible_module__"] in self._commands and ( + "changed_when" not in task + and "creates" not in task["action"] + and "removes" not in task["action"] + ): + result.append(self.create_matcherror(filename=file)) + return result + + +if "pytest" in sys.modules: + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("file", "expected"), + ( + pytest.param( + "examples/playbooks/rule-no-changed-when-pass.yml", 0, id="pass" + ), + pytest.param( + "examples/playbooks/rule-no-changed-when-fail.yml", 3, id="fail" + ), + ), + ) + def test_rule_no_changed_when( + default_rules_collection: RulesCollection, file: str, expected: int + ) -> None: + """Validate no-changed-when rule.""" + results = Runner(file, rules=default_rules_collection).run() + + for result in results: + assert result.rule.id == CommandHasChangesCheckRule.id, result + assert len(results) == expected diff --git a/src/ansiblelint/rules/no_free_form.md b/src/ansiblelint/rules/no_free_form.md new file mode 100644 index 0000000..3ce5140 --- /dev/null +++ b/src/ansiblelint/rules/no_free_form.md @@ -0,0 +1,58 @@ +# no-free-form + +This rule identifies any use of +[free-form](https://docs.ansible.com/ansible/2.7/user_guide/playbooks_intro.html#action-shorthand) +module calling syntax and asks for switching to the full syntax. + +**Free-form** syntax, also known as **inline** or **shorthand**, can produce +subtle bugs. It can also prevent editors and IDEs from providing feedback, +autocomplete and validation for the edited line. + +!!! note + + As long you just pass a YAML string that contains a `=` character inside as the + parameter to the action module name, we consider this as using free-formsyntax. + Be sure you pass a dictionary to the module, so the free-form parsing is never + triggered. + +As `raw` module only accepts free-form, we trigger `no-free-form[raw]` only if +we detect the presence of `executable=` inside raw calls. 
We advice the explicit +use of `args:` dictionary for configuring the executable to be run. + +This rule can produce messages such: + +- `no-free-form` - Free-form syntax is discouraged. +- `no-free-form[raw-non-string]` - Passing a non string value to `raw` module is + neither documented or supported. + +## Problematic code + +```yaml +--- +- name: Example with discouraged free-form syntax + hosts: localhost + tasks: + - name: Create a placefolder file + ansible.builtin.command: chdir=/tmp touch foo # <-- don't use free-form + - name: Use raw to echo + ansible.builtin.raw: executable=/bin/bash echo foo # <-- don't use executable= + changed_when: false +``` + +## Correct code + +```yaml +--- +- name: Example that avoids free-form syntax + hosts: localhost + tasks: + - name: Create a placefolder file + ansible.builtin.command: + cmd: touch foo # <-- ansible will not touch it + chdir: /tmp + - name: Use raw to echo + ansible.builtin.raw: echo foo + args: + executable: /bin/bash # <-- explicit is better + changed_when: false +``` diff --git a/src/ansiblelint/rules/no_free_form.py b/src/ansiblelint/rules/no_free_form.py new file mode 100644 index 0000000..5a23e8b --- /dev/null +++ b/src/ansiblelint/rules/no_free_form.py @@ -0,0 +1,107 @@ +"""Implementation of NoFreeFormRule.""" +from __future__ import annotations + +import re +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import INCLUSION_ACTION_NAMES, LINE_NUMBER_KEY +from ansiblelint.errors import MatchError +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class NoFreeFormRule(AnsibleLintRule): + """Rule for detecting discouraged free-form syntax for action modules.""" + + id = "no-free-form" + description = "Avoid free-form inside files as it can produce subtile bugs." + severity = "MEDIUM" + tags = ["syntax", "risk"] + version_added = "v6.8.0" + needs_raw_task = True + cmd_shell_re = re.compile( + r"(chdir|creates|executable|removes|stdin|stdin_add_newline|warn)=" + ) + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + results: list[MatchError] = [] + action = task["action"]["__ansible_module_original__"] + + if action in INCLUSION_ACTION_NAMES: + return results + + action_value = task["__raw_task__"].get(action, None) + if task["action"].get("__ansible_module__", None) == "raw": + if isinstance(action_value, str): + if "executable=" in action_value: + results.append( + self.create_matcherror( + message="Avoid embedding `executable=` inside raw calls, use explicit args dictionary instead.", + linenumber=task[LINE_NUMBER_KEY], + filename=file, + tag=f"{self.id}[raw]", + ) + ) + else: + results.append( + self.create_matcherror( + message="Passing a non string value to `raw` module is neither documented or supported.", + linenumber=task[LINE_NUMBER_KEY], + filename=file, + tag=f"{self.id}[raw-non-string]", + ) + ) + elif isinstance(action_value, str) and "=" in action_value: + fail = False + if task["action"].get("__ansible_module__") in ( + "ansible.builtin.command", + "ansible.builtin.shell", + "ansible.windows.win_command", + "ansible.windows.win_shell", + "command", + "shell", + "win_command", + "win_shell", + ): + if self.cmd_shell_re.match(action_value): + fail = True + else: + fail = True + if fail: + results.append( + self.create_matcherror( + message=f"Avoid using free-form when calling module actions. 
({action})", + linenumber=task[LINE_NUMBER_KEY], + filename=file, + ) + ) + return results + + +if "pytest" in sys.modules: # noqa: C901 + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("file", "expected"), + ( + pytest.param("examples/playbooks/rule-no-free-form-pass.yml", 0, id="pass"), + pytest.param("examples/playbooks/rule-no-free-form-fail.yml", 2, id="fail"), + ), + ) + def test_rule_no_free_form( + default_rules_collection: RulesCollection, file: str, expected: int + ) -> None: + """Validate that rule works as intended.""" + results = Runner(file, rules=default_rules_collection).run() + + for result in results: + assert result.rule.id == NoFreeFormRule.id, result + assert len(results) == expected diff --git a/src/ansiblelint/rules/no_handler.md b/src/ansiblelint/rules/no_handler.md new file mode 100644 index 0000000..4deccaa --- /dev/null +++ b/src/ansiblelint/rules/no_handler.md @@ -0,0 +1,55 @@ +# no-handler + +This rule checks for the correct handling of changes to results or conditions. + +If a task has a `when: result.changed` condition, it effectively acts as a +[handler](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_handlers.html#handlers). +The recommended approach is to use `notify` and move tasks to `handlers`. +If necessary you can silence the rule by add a `# noqa: no-handler` comment at the end of the line. + +## Problematic Code + +```yaml +--- +- name: Example of no-handler rule + hosts: localhost + tasks: + - name: Register result of a task + ansible.builtin.copy: + dest: "/tmp/placeholder" + content: "Ansible made this!" + mode: 0600 + register: result # <-- Registers the result of the task. + - name: Second command to run + ansible.builtin.debug: + msg: The placeholder file was modified! + when: result.changed # <-- Triggers the no-handler rule. +``` + +```yaml +--- +# Optionally silences the rule. +when: result.changed # noqa: no-handler +``` + +## Correct Code + +The following code includes the same functionality as the problematic code without recording a `result` variable. + +```yaml +--- +- name: Example of no-handler rule + hosts: localhost + tasks: + - name: Register result of a task + ansible.builtin.copy: + dest: "/tmp/placeholder" + content: "Ansible made this!" + mode: 0600 + notify: + - Second command to run # <-- Handler runs only when the file changes. + handlers: + - name: Second command to run + ansible.builtin.debug: + msg: The placeholder file was modified! +``` diff --git a/src/ansiblelint/rules/no_handler.py b/src/ansiblelint/rules/no_handler.py new file mode 100644 index 0000000..a7de4e4 --- /dev/null +++ b/src/ansiblelint/rules/no_handler.py @@ -0,0 +1,157 @@ +# Copyright (c) 2016 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +"""UseHandlerRatherThanWhenChangedRule used with ansible-lint.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +def _changed_in_when(item: str) -> bool: + item_list = item.split() + + if not isinstance(item, str) or "and" in item_list: + return False + return any( + changed in item + for changed in [ + ".changed", + "|changed", + '["changed"]', + "['changed']", + "is changed", + ] + ) + + +class UseHandlerRatherThanWhenChangedRule(AnsibleLintRule): + """Tasks that run when changed should likely be handlers.""" + + id = "no-handler" + description = ( + "If a task has a ``when: result.changed`` setting, it is effectively " + "acting as a handler. You could use ``notify`` and move that task to " + "``handlers``." + ) + link = "https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_handlers.html#handlers" + severity = "MEDIUM" + tags = ["idiom"] + version_added = "historic" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if task["__ansible_action_type__"] != "task": + return False + + when = task.get("when") + + if isinstance(when, list): + for item in when: + return _changed_in_when(item) + if isinstance(when, str): + return _changed_in_when(when) + return False + + +if "pytest" in sys.modules: + import pytest + + SUCCEED_CHANGED_WHEN = """ +- hosts: all + tasks: + - name: Execute something + command: echo 123 + register: result + changed_when: true +""" + + SUCCEED_WHEN_AND = """ +- hosts: all + tasks: + - name: Registering task 1 + command: echo Hello + register: r1 + changed_when: true + + - name: Registering task 2 + command: echo Hello + register: r2 + changed_when: true + + - name: Use when task + command: echo Hello + when: r1.changed and r2.changed +""" + + FAIL_RESULT_IS_CHANGED = """ +- hosts: all + tasks: + - name: This should trigger no-handler rule + command: echo could be done better + when: result is changed +""" + + FAILED_SOMETHING_CHANGED = """ +- hosts: all + tasks: + - name: Do anything + command: echo 123 + when: + - something.changed +""" + + @pytest.mark.parametrize( + "rule_runner", (UseHandlerRatherThanWhenChangedRule,), indirect=["rule_runner"] + ) + def test_succeed_changed_when(rule_runner: Any) -> None: + """Using changed_when is acceptable.""" + results = rule_runner.run_playbook(SUCCEED_CHANGED_WHEN) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (UseHandlerRatherThanWhenChangedRule,), indirect=["rule_runner"] + ) + def test_succeed_when_and(rule_runner: Any) -> None: + """See https://github.com/ansible/ansible-lint/issues/1526.""" + results = rule_runner.run_playbook(SUCCEED_WHEN_AND) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (UseHandlerRatherThanWhenChangedRule,), indirect=["rule_runner"] + ) + def test_fail_result_is_changed(rule_runner: Any) -> None: + """This task uses 'is changed'.""" + results = 
rule_runner.run_playbook(FAIL_RESULT_IS_CHANGED) + assert len(results) == 1 + + @pytest.mark.parametrize( + "rule_runner", (UseHandlerRatherThanWhenChangedRule,), indirect=["rule_runner"] + ) + def test_failed_something_changed(rule_runner: Any) -> None: + """This task uses '.changed'.""" + results = rule_runner.run_playbook(FAILED_SOMETHING_CHANGED) + assert len(results) == 1 diff --git a/src/ansiblelint/rules/no_jinja_when.md b/src/ansiblelint/rules/no_jinja_when.md new file mode 100644 index 0000000..702e807 --- /dev/null +++ b/src/ansiblelint/rules/no_jinja_when.md @@ -0,0 +1,32 @@ +# no-jinja-when + +This rule checks conditional statements for Jinja expressions in curly brackets `{{ }}`. +Ansible processes conditionals statements that use the `when`, `failed_when`, and `changed_when` clauses as Jinja expressions. + +An Ansible rule is to always use `{{ }}` except with `when` keys. +Using `{{ }}` in conditionals creates a nested expression, which is an Ansible +anti-pattern and does not produce expected results. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Shut down Debian systems + ansible.builtin.command: /sbin/shutdown -t now + when: "{{ ansible_facts['os_family'] == 'Debian' }}" # <- Nests a Jinja expression in a conditional statement. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Shut down Debian systems + ansible.builtin.command: /sbin/shutdown -t now + when: ansible_facts['os_family'] == "Debian" # <- Uses facts in a conditional statement. +``` diff --git a/src/ansiblelint/rules/no_jinja_when.py b/src/ansiblelint/rules/no_jinja_when.py new file mode 100644 index 0000000..c5ea41b --- /dev/null +++ b/src/ansiblelint/rules/no_jinja_when.py @@ -0,0 +1,83 @@ +"""Implementation of no-jinja-when rule.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + + +class NoFormattingInWhenRule(AnsibleLintRule): + """No Jinja2 in when.""" + + id = "no-jinja-when" + description = ( + "``when`` is a raw Jinja2 expression, remove redundant {{ }} from variable(s)." + ) + severity = "HIGH" + tags = ["deprecations"] + version_added = "historic" + + @staticmethod + def _is_valid(when: str) -> bool: + if isinstance(when, list): + for item in when: + if ( + isinstance(item, str) + and item.find("{{") != -1 + and item.find("}}") != -1 + ): + return False + return True + if not isinstance(when, str): + return True + return when.find("{{") == -1 and when.find("}}") == -1 + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + errors: list[MatchError] = [] + if isinstance(data, dict): + if "roles" not in data or data["roles"] is None: + return errors + for role in data["roles"]: + if self.matchtask(role, file=file): + errors.append( + self.create_matcherror( + details=str({"when": role}), + filename=file, + linenumber=role[LINE_NUMBER_KEY], + ) + ) + return errors + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + return "when" in task and not self._is_valid(task["when"]) + + +if "pytest" in sys.modules: + # Tests for no-jinja-when rule. 
+ from ansiblelint.rules import RulesCollection + from ansiblelint.runner import Runner + + def test_file_positive() -> None: + """Positive test for no-jinja-when.""" + collection = RulesCollection() + collection.register(NoFormattingInWhenRule()) + success = "examples/playbooks/rule-no-jinja-when-pass.yml" + good_runner = Runner(success, rules=collection) + assert [] == good_runner.run() + + def test_file_negative() -> None: + """Negative test for no-jinja-when.""" + collection = RulesCollection() + collection.register(NoFormattingInWhenRule()) + failure = "examples/playbooks/rule-no-jinja-when-fail.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 3 diff --git a/src/ansiblelint/rules/no_log_password.md b/src/ansiblelint/rules/no_log_password.md new file mode 100644 index 0000000..579dd11 --- /dev/null +++ b/src/ansiblelint/rules/no_log_password.md @@ -0,0 +1,45 @@ +# no-log-password + +This rule ensures playbooks do not write passwords to logs when using loops. +Always set the `no_log: true` attribute to protect sensitive data. + +While most Ansible modules mask sensitive data, using secrets inside a loop can result in those secrets being logged. +Explicitly adding `no_log: true` prevents accidentally exposing secrets. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Log user passwords + ansible.builtin.user: + name: john_doe + comment: John Doe + uid: 1040 + group: admin + password: "{{ item }}" + with_items: + - wow + no_log: false # <- Sets the no_log attribute to false. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Do not log user passwords + ansible.builtin.user: + name: john_doe + comment: John Doe + uid: 1040 + group: admin + password: "{{ item }}" + with_items: + - wow + no_log: true # <- Sets the no_log attribute to a non-false value. +``` diff --git a/src/ansiblelint/rules/no_log_password.py b/src/ansiblelint/rules/no_log_password.py new file mode 100644 index 0000000..bba2415 --- /dev/null +++ b/src/ansiblelint/rules/no_log_password.py @@ -0,0 +1,265 @@ +# Copyright 2018, Rackspace US, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""NoLogPasswordsRule used with ansible-lint.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.utils import convert_to_boolean + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class NoLogPasswordsRule(AnsibleLintRule): + """Password should not be logged.""" + + id = "no-log-password" + description = ( + "When passing password argument you should have no_log configured " + "to a non False value to avoid accidental leaking of secrets." 
+ ) + severity = "LOW" + tags = ["opt-in", "security", "experimental"] + version_added = "v5.0.9" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if task["action"]["__ansible_module_original__"] == "ansible.builtin.user" and ( + (task["action"].get("password_lock") or task["action"].get("password_lock")) + and not task["action"].get("password") + ): + has_password = False + else: + for param in task["action"].keys(): + if "password" in param: + has_password = True + break + else: + has_password = False + + has_loop = [key for key in task if key.startswith("with_") or key == "loop"] + # No no_log and no_log: False behave the same way + # and should return a failure (return True), so we + # need to invert the boolean + no_log = task.get("no_log", False) + + if ( + isinstance(no_log, str) + and no_log.startswith("{{") + and no_log.endswith("}}") + ): + # we cannot really evaluate jinja expressions + return False + + return bool( + has_password and not convert_to_boolean(no_log) and len(has_loop) > 0 + ) + + +if "pytest" in sys.modules: # noqa: C901 + import pytest + + from ansiblelint.testing import RunFromText # pylint: disable=ungrouped-imports + + NO_LOG_UNUSED = """ +- name: Test + hosts: all + tasks: + - name: Succeed when no_log is not used but no loop present + ansible.builtin.user: + name: john_doe + password: "wow" + state: absent +""" + + NO_LOG_FALSE = """ +- hosts: all + tasks: + - name: Use of jinja for no_log is valid + user: + name: john_doe + user_password: "{{ item }}" + state: absent + no_log: "{{ False }}" + - name: Fail when no_log is set to False + user: + name: john_doe + user_password: "{{ item }}" + state: absent + with_items: + - wow + - now + no_log: False + - name: Fail when no_log is set to False + ansible.builtin.user: + name: john_doe + user_password: "{{ item }}" + state: absent + with_items: + - wow + - now + no_log: False +""" + + NO_LOG_NO = """ +- hosts: all + tasks: + - name: Fail when no_log is set to no + user: + name: john_doe + password: "{{ item }}" + state: absent + no_log: no + loop: + - wow + - now +""" + + PASSWORD_WITH_LOCK = """ +- hosts: all + tasks: + - name: Fail when password is set and password_lock is true + user: + name: "{{ item }}" + password: "wow" + password_lock: true + with_random_choice: + - ansible + - lint +""" + + NO_LOG_YES = """ +- hosts: all + tasks: + - name: Succeed when no_log is set to yes + with_list: + - name: user + password: wow + - password: now + name: ansible + user: + name: "{{ item.name }}" + password: "{{ item.password }}" + state: absent + no_log: yes +""" + + NO_LOG_TRUE = """ +- hosts: all + tasks: + - name: Succeed when no_log is set to True + user: + name: john_doe + user_password: "{{ item }}" + state: absent + no_log: True + loop: + - wow + - now +""" + + PASSWORD_LOCK_YES = """ +- hosts: all + tasks: + - name: Succeed when only password locking account + user: + name: "{{ item }}" + password_lock: yes + # user_password: "this is a comment, not a password" + with_list: + - ansible + - lint +""" + + PASSWORD_LOCK_FALSE = """ +- hosts: all + tasks: + - name: Succeed when password_lock is false and password is not used + user: + name: lint + password_lock: False +""" + + @pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_no_log_unused(rule_runner: RunFromText) -> None: + """The task does not use no_log but also no loop.""" + results = rule_runner.run_playbook(NO_LOG_UNUSED) + assert len(results) == 0 + + 
@pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_no_log_false(rule_runner: RunFromText) -> None: + """The task sets no_log to false.""" + results = rule_runner.run_playbook(NO_LOG_FALSE) + assert len(results) == 2 + for result in results: + assert result.rule.id == "no-log-password" + + @pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_no_log_no(rule_runner: RunFromText) -> None: + """The task sets no_log to no.""" + results = rule_runner.run_playbook(NO_LOG_NO) + assert len(results) == 1 + assert results[0].rule.id == "no-log-password" + + @pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_password_with_lock(rule_runner: RunFromText) -> None: + """The task sets a password but also lock the user.""" + results = rule_runner.run_playbook(PASSWORD_WITH_LOCK) + assert len(results) == 1 + assert results[0].rule.id == "no-log-password" + + @pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_no_log_yes(rule_runner: RunFromText) -> None: + """The task sets no_log to yes.""" + results = rule_runner.run_playbook(NO_LOG_YES) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_no_log_true(rule_runner: RunFromText) -> None: + """The task sets no_log to true.""" + results = rule_runner.run_playbook(NO_LOG_TRUE) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_no_log_password_lock_yes(rule_runner: RunFromText) -> None: + """The task only locks the user.""" + results = rule_runner.run_playbook(PASSWORD_LOCK_YES) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_password_lock_false(rule_runner: RunFromText) -> None: + """The task does not actually lock the user.""" + results = rule_runner.run_playbook(PASSWORD_LOCK_FALSE) + assert len(results) == 0 diff --git a/src/ansiblelint/rules/no_prompting.md b/src/ansiblelint/rules/no_prompting.md new file mode 100644 index 0000000..7e525c8 --- /dev/null +++ b/src/ansiblelint/rules/no_prompting.md @@ -0,0 +1,35 @@ +# no-prompting + +This rule checks for `vars_prompt` or the `ansible.builtin.pause` module in playbooks. +You should enable this rule to ensure that playbooks can run unattended and in CI/CD pipelines. + +This is an opt-in rule. +You must enable it in your Ansible-lint configuration as follows: + +```yaml +enable_list: + - no-prompting +``` + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + vars_prompt: # <- Prompts the user to input credentials. + - name: username + prompt: What is your username? + private: false + + - name: password + prompt: What is your password? + tasks: + - name: Pause for 5 minutes + ansible.builtin.pause: + minutes: 5 # <- Pauses playbook execution for a set period of time. +``` + +## Correct Code + +Correct code for this rule is to omit `vars_prompt` and the `ansible.builtin.pause` module from your playbook. 
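For illustration only, a minimal sketch of the non-interactive alternative (not part of the rule documentation): the value previously collected with `vars_prompt` (here `username`, reusing the name from the problematic example above) is consumed as an ordinary variable and supplied at run time, for example with `--extra-vars` or a vars file. The debug task below is purely illustrative.

```yaml
---
- name: Example playbook
  hosts: all
  tasks:
    - name: Use a variable supplied at run time instead of prompting
      ansible.builtin.debug:
        msg: "Running as {{ username }}" # <- Provided via -e/--extra-vars or a vars file, not vars_prompt.
```

Invoked as, for instance, `ansible-playbook playbook.yml -e "username=admin"`, the playbook runs unattended and does not trigger this rule.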
diff --git a/src/ansiblelint/rules/no_prompting.py b/src/ansiblelint/rules/no_prompting.py new file mode 100644 index 0000000..27893d1 --- /dev/null +++ b/src/ansiblelint/rules/no_prompting.py @@ -0,0 +1,73 @@ +"""Implementation of no-prompting rule.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + + +class NoPromptingRule(AnsibleLintRule): + """Disallow prompting.""" + + id = "no-prompting" + description = ( + "Disallow the use of vars_prompt or ansible.builtin.pause to better" + "accommodate unattended playbook runs and use in CI pipelines." + ) + tags = ["opt-in"] + severity = "VERY_LOW" + version_added = "v6.0.3" + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + """Return matches found for a specific playbook.""" + # If the Play uses the 'vars_prompt' section to set variables + + if file.kind != "playbook": # pragma: no cover + return [] + + vars_prompt = data.get("vars_prompt", None) + if not vars_prompt: + return [] + return [ + self.create_matcherror( + message="Play uses vars_prompt", + linenumber=vars_prompt[0][LINE_NUMBER_KEY], + filename=file, + ) + ] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + """Return matches for ansible.builtin.pause tasks.""" + # We do not want to trigger this rule if pause has either seconds or + # minutes defined, as that does not make it blocking. + return task["action"]["__ansible_module_original__"] in [ + "pause", + "ansible.builtin.pause", + ] and not ( + task["action"].get("minutes", None) or task["action"].get("seconds", None) + ) + + +if "pytest" in sys.modules: + from ansiblelint.config import options + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + def test_no_prompting_fail() -> None: + """Negative test for no-prompting.""" + # For testing we want to manually enable opt-in rules + options.enable_list = ["no-prompting"] + rules = RulesCollection(options=options) + rules.register(NoPromptingRule()) + results = Runner("examples/playbooks/rule-no-prompting.yml", rules=rules).run() + assert len(results) == 2 + for result in results: + assert result.rule.id == "no-prompting" diff --git a/src/ansiblelint/rules/no_relative_paths.md b/src/ansiblelint/rules/no_relative_paths.md new file mode 100644 index 0000000..568a145 --- /dev/null +++ b/src/ansiblelint/rules/no_relative_paths.md @@ -0,0 +1,94 @@ +# no-relative-paths + +This rule checks for relative paths in the `ansible.builtin.copy` and +`ansible.builtin.template` modules. + +Relative paths in a task most often direct Ansible to remote files and +directories on managed nodes. In the `ansible.builtin.copy` and +`ansible.builtin.template` modules, the `src` argument refers to local files and +directories on the control node. + +The recommended locations to store files are as follows: + +- Use the `files/` folder in the playbook or role directory for the `copy` + module. +- Use the `templates/` folder in the playbook or role directory for the + `template` module. + +These folders allow you to omit the path or use a sub-folder when specifying +files with the `src` argument. + +!!! 
note + + If resources are outside your Ansible playbook or role directory you should use an absolute path with the `src` argument. + +!!! warning + + Do not store resources at the same directory level as your Ansible playbook or tasks files. + Doing this can result in disorganized projects and cause user confusion when distinguishing between resources of the same type, such as YAML. + +See +[task paths](https://docs.ansible.com/ansible/latest/playbook_guide/playbook_pathing.html#task-paths) +in the Ansible documentation for more information. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Template a file to /etc/file.conf + ansible.builtin.template: + src: ../my_templates/foo.j2 # <- Uses a relative path in the src argument. + dest: /etc/file.conf + owner: bin + group: wheel + mode: "0644" +``` + +```yaml +- name: Example playbook + hosts: all + vars: + source_path: ../../my_templates/foo.j2 # <- Sets a variable to a relative path. + tasks: + - name: Copy a file to /etc/file.conf + ansible.builtin.copy: + src: "{{ source_path }}" # <- Uses the variable in the src argument. + dest: /etc/foo.conf + owner: foo + group: foo + mode: "0644" +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Template a file to /etc/file.conf + ansible.builtin.template: + src: foo.j2 # <- Uses a path from inside templates/ directory. + dest: /etc/file.conf + owner: bin + group: wheel + mode: "0644" +``` + +```yaml +- name: Example playbook + hosts: all + vars: + source_path: foo.j2 # <- Uses a path from inside files/ directory. + tasks: + - name: Copy a file to /etc/file.conf + ansible.builtin.copy: + src: "{{ source_path }}" # <- Uses the variable in the src argument. + dest: /etc/foo.conf + owner: foo + group: foo + mode: "0644" +``` diff --git a/src/ansiblelint/rules/no_relative_paths.py b/src/ansiblelint/rules/no_relative_paths.py new file mode 100644 index 0000000..16ea51b --- /dev/null +++ b/src/ansiblelint/rules/no_relative_paths.py @@ -0,0 +1,45 @@ +"""Implementation of no-relative-paths rule.""" +# Copyright (c) 2016, Tsukinowa Inc. <info@tsukinowa.jp> +# Copyright (c) 2018, Ansible Project + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class RoleRelativePath(AnsibleLintRule): + """The src argument should not use a relative path.""" + + id = "no-relative-paths" + description = "The ``copy`` and ``template`` modules should not use relative path for ``src``." + severity = "HIGH" + tags = ["idiom"] + version_added = "v4.0.0" + + _module_to_path_folder = { + "copy": "files", + "win_copy": "files", + "template": "templates", + "win_template": "win_templates", + } + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + module = task["action"]["__ansible_module__"] + if module not in self._module_to_path_folder: + return False + + if "src" not in task["action"]: + return False + + path_to_check = f"../{self._module_to_path_folder[module]}" + if path_to_check in task["action"]["src"]: + return True + + return False diff --git a/src/ansiblelint/rules/no_same_owner.md b/src/ansiblelint/rules/no_same_owner.md new file mode 100644 index 0000000..350a3d4 --- /dev/null +++ b/src/ansiblelint/rules/no_same_owner.md @@ -0,0 +1,55 @@ +# no-same-owner + +This rule checks that the owner and group do not transfer across hosts. 
+ +In many cases the owner and group on remote hosts do not match the owner and group assigned to source files. +Preserving the owner and group during transfer can result in errors with permissions or leaking sensitive information. + +When you synchronize files, you should avoid transferring the owner and group by setting `owner: false` and `group: false` arguments. +When you unpack archives with the `ansible.builtin.unarchive` module you should set the `--no-same-owner` option. + +This is an opt-in rule. +You must enable it in your Ansible-lint configuration as follows: + +```yaml +enable_list: + - no-same-owner +``` + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Synchronize conf file + ansible.posix.synchronize: + src: /path/conf.yaml + dest: /path/conf.yaml # <- Transfers the owner and group for the file. + - name: Extract tarball to path + ansible.builtin.unarchive: + src: "{{ file }}.tar.gz" + dest: /my/path/ # <- Transfers the owner and group for the file. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Synchronize conf file + ansible.posix.synchronize: + src: /path/conf.yaml + dest: /path/conf.yaml + owner: false + group: false # <- Does not transfer the owner and group for the file. + - name: Extract tarball to path + ansible.builtin.unarchive: + src: "{{ file }}.tar.gz" + dest: /my/path/ + extra_opts: + - --no-same-owner # <- Does not transfer the owner and group for the file. +``` diff --git a/src/ansiblelint/rules/no_same_owner.py b/src/ansiblelint/rules/no_same_owner.py new file mode 100644 index 0000000..77c1b40 --- /dev/null +++ b/src/ansiblelint/rules/no_same_owner.py @@ -0,0 +1,104 @@ +"""Optional rule for avoiding keeping owner/group when transferring files.""" +from __future__ import annotations + +import re +import sys +from typing import TYPE_CHECKING, Any + +from ansible.utils.sentinel import Sentinel + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class NoSameOwnerRule(AnsibleLintRule): + """Do not preserve the owner and group when transferring files across hosts.""" + + id = "no-same-owner" + description = """ +Optional rule that highlights dangers of assuming that user/group on the remote +machines may not exist on ansible controller or vice versa. Owner and group +should not be preserved when transferring files between them. 
+""" + severity = "LOW" + tags = ["opt-in"] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + """Return matches for a task.""" + action = task.get("action") + if not isinstance(action, dict): + return False + + module = action["__ansible_module__"] + + if module in ["synchronize", "ansible.posix.synchronize"]: + return self.handle_synchronize(task, action) + + if module in ["unarchive", "ansible.builtin.unarchive"]: + return self.handle_unarchive(task, action) + + return False + + @staticmethod + def handle_synchronize(task: Any, action: dict[str, Any]) -> bool: + """Process a synchronize task.""" + if task.get("delegate_to") != Sentinel: + return False + + archive = action.get("archive", True) + if action.get("owner", archive) or action.get("group", archive): + return True + return False + + @staticmethod + def handle_unarchive(task: Any, action: dict[str, Any]) -> bool: + """Process unarchive task.""" + delegate_to = task.get("delegate_to") + if ( + delegate_to == "localhost" + or delegate_to != "localhost" + and not action.get("remote_src") + ): + src = action.get("src") + if not isinstance(src, str): + return False + + if src.endswith("zip"): + if "-X" in action.get("extra_opts", []): + return True + if re.search(r".*\.tar(\.(gz|bz2|xz))?$", src): + if "--no-same-owner" not in action.get("extra_opts", []): + return True + return False + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("test_file", "failures"), + ( + pytest.param( + "examples/roles/role_for_no_same_owner/tasks/fail.yml", 12, id="fail" + ), + pytest.param( + "examples/roles/role_for_no_same_owner/tasks/pass.yml", 0, id="pass" + ), + ), + ) + def test_no_same_owner_rule( + default_rules_collection: RulesCollection, test_file: str, failures: int + ) -> None: + """Test rule matches.""" + results = Runner(test_file, rules=default_rules_collection).run() + assert len(results) == failures + for result in results: + assert result.message == NoSameOwnerRule().shortdesc diff --git a/src/ansiblelint/rules/no_tabs.md b/src/ansiblelint/rules/no_tabs.md new file mode 100644 index 0000000..7895122 --- /dev/null +++ b/src/ansiblelint/rules/no_tabs.md @@ -0,0 +1,38 @@ +# no-tabs + +This rule checks for the tab character. The `\t` tab character can result in +unexpected display or formatting issues. You should always use spaces instead of +tabs. + +!!! note + + This rule does not trigger alerts for tab characters in the ``ansible.builtin.lineinfile`` module. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Do not trigger the rule + ansible.builtin.lineinfile: + path: some.txt + regexp: '^\t$' + line: 'string with \t inside' + - name: Trigger the rule with a debug message + ansible.builtin.debug: + msg: "Using the \t character can cause formatting issues." # <- Includes the tab character. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Do not trigger the no-tabs rule + ansible.builtin.debug: + msg: "Using space characters avoids formatting issues." 
+``` diff --git a/src/ansiblelint/rules/no_tabs.py b/src/ansiblelint/rules/no_tabs.py new file mode 100644 index 0000000..c57e352 --- /dev/null +++ b/src/ansiblelint/rules/no_tabs.py @@ -0,0 +1,63 @@ +"""Implementation of no-tabs rule.""" +# Copyright (c) 2016, Will Thames and contributors +# Copyright (c) 2018, Ansible Project +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.yaml_utils import nested_items_path + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class NoTabsRule(AnsibleLintRule): + """Most files should not contain tabs.""" + + id = "no-tabs" + description = "Tabs can cause unexpected display issues, use spaces" + severity = "LOW" + tags = ["formatting"] + version_added = "v4.0.0" + allow_list = [ + ("lineinfile", "insertafter"), + ("lineinfile", "insertbefore"), + ("lineinfile", "regexp"), + ("lineinfile", "line"), + ("ansible.builtin.lineinfile", "insertafter"), + ("ansible.builtin.lineinfile", "insertbefore"), + ("ansible.builtin.lineinfile", "regexp"), + ("ansible.builtin.lineinfile", "line"), + ("ansible.legacy.lineinfile", "insertafter"), + ("ansible.legacy.lineinfile", "insertbefore"), + ("ansible.legacy.lineinfile", "regexp"), + ("ansible.legacy.lineinfile", "line"), + ] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + action = task["action"]["__ansible_module__"] + for k, v, _ in nested_items_path(task): + if isinstance(k, str) and "\t" in k: + return True + if isinstance(v, str) and "\t" in v and (action, k) not in self.allow_list: + return True + return False + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + def test_no_tabs_rule(default_rules_collection: RulesCollection) -> None: + """Test rule matches.""" + results = Runner( + "examples/playbooks/rule-no-tabs.yml", rules=default_rules_collection + ).run() + assert results[0].linenumber == 10 + assert results[0].message == NoTabsRule().shortdesc + assert len(results) == 1 diff --git a/src/ansiblelint/rules/only_builtins.md b/src/ansiblelint/rules/only_builtins.md new file mode 100644 index 0000000..750e194 --- /dev/null +++ b/src/ansiblelint/rules/only_builtins.md @@ -0,0 +1,36 @@ +# only-builtins + +This rule checks that playbooks use actions from the `ansible.builtin` collection only. + +This is an opt-in rule. +You must enable it in your Ansible-lint configuration as follows: + +```yaml +enable_list: + - only-builtins +``` + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Deploy a Helm chart for Prometheus + kubernetes.core.helm: # <- Uses a non-builtin collection. + name: test + chart_ref: stable/prometheus + release_namespace: monitoring + create_namespace: true +``` + +## Correct Code + +```yaml +- name: Example playbook + hosts: localhost + tasks: + - name: Run a shell command + ansible.builtin.shell: echo This playbook uses actions from the builtin collection only. 
+``` diff --git a/src/ansiblelint/rules/only_builtins.py b/src/ansiblelint/rules/only_builtins.py new file mode 100644 index 0000000..d84b9d7 --- /dev/null +++ b/src/ansiblelint/rules/only_builtins.py @@ -0,0 +1,94 @@ +"""Rule definition for usage of builtin actions only.""" +from __future__ import annotations + +import sys +from typing import Any + +from ansiblelint.config import options +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.rules.fqcn import builtins +from ansiblelint.skip_utils import is_nested_task + + +class OnlyBuiltinsRule(AnsibleLintRule): + """Use only builtin actions.""" + + id = "only-builtins" + severity = "MEDIUM" + description = "Check whether the playbook uses anything but ``ansible.builtin``" + tags = ["opt-in", "experimental"] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + module = task["action"]["__ansible_module_original__"] + + allowed_collections = [ + "ansible.builtin", + "ansible.legacy", + ] + options.only_builtins_allow_collections + allowed_modules = builtins + options.only_builtins_allow_modules + + is_allowed = ( + any(module.startswith(f"{prefix}.") for prefix in allowed_collections) + or module in allowed_modules + ) + + return not is_allowed and not is_nested_task(task) + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + # pylint: disable=ungrouped-imports + import pytest + + from ansiblelint.constants import SUCCESS_RC, VIOLATIONS_FOUND_RC + from ansiblelint.testing import RunFromText, run_ansible_lint + + SUCCESS_PLAY = """ +- hosts: localhost + tasks: + - name: A block + block: + - name: Shell (fqcn) + ansible.builtin.shell: echo This rule should not get matched by the only-builtins rule + - name: Command with legacy FQCN + ansible.legacy.command: echo This rule should not get matched by the only-builtins rule + """ + + def test_only_builtins_fail() -> None: + """Test rule matches.""" + result = run_ansible_lint( + "--strict", + "--warn-list=", + "--enable-list", + "only-builtins", + "examples/playbooks/rule-only-builtins.yml", + ) + assert result.returncode == VIOLATIONS_FOUND_RC + assert "Failed" in result.stderr + assert "warning(s)" in result.stderr + assert "only-builtins: Use only builtin actions" in result.stdout + + def test_only_builtins_allow() -> None: + """Test rule doesn't match.""" + conf_path = "examples/playbooks/.ansible-lint-only-builtins-allow" + result = run_ansible_lint( + f"--config-file={conf_path}", + "--strict", + "--warn-list=", + "--enable-list", + "only-builtins", + "examples/playbooks/rule-only-builtins.yml", + ) + assert "only-builtins" not in result.stdout + assert result.returncode == SUCCESS_RC + + @pytest.mark.parametrize( + "rule_runner", (OnlyBuiltinsRule,), indirect=["rule_runner"] + ) + def test_only_builtin_pass(rule_runner: RunFromText) -> None: + """Test rule does not match.""" + results = rule_runner.run_playbook(SUCCESS_PLAY) + assert len(results) == 0, results diff --git a/src/ansiblelint/rules/package_latest.md b/src/ansiblelint/rules/package_latest.md new file mode 100644 index 0000000..c7e0d82 --- /dev/null +++ b/src/ansiblelint/rules/package_latest.md @@ -0,0 +1,71 @@ +# package-latest + +This rule checks that package managers install software in a controlled, safe manner. + +Package manager modules, such as `ansible.builtin.yum`, include a `state` parameter that configures how Ansible installs software. 
+In production environments, you should set `state` to `present` and specify a target version to ensure that packages are installed to a planned and tested version. + +Setting `state` to `latest` not only installs software, it performs an update and installs additional packages. +This can result in performance degradation or loss of service. +If you do want to update packages to the latest version, you should also set the `update_only` parameter to `true` to avoid installing additional packages. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Install Ansible + ansible.builtin.yum: + name: ansible + state: latest # <- Installs the latest package. + + - name: Install Ansible-lint + ansible.builtin.pip: + name: ansible-lint + args: + state: latest # <- Installs the latest package. + + - name: Install some-package + ansible.builtin.package: + name: some-package + state: latest # <- Installs the latest package. + + - name: Install Ansible with update_only to false + ansible.builtin.yum: + name: sudo + state: latest + update_only: false # <- Updates and installs packages. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Install Ansible + ansible.builtin.yum: + name: ansible-2.12.7.0 + state: present # <- Pins the version to install with yum. + + - name: Install Ansible-lint + ansible.builtin.pip: + name: ansible-lint + args: + state: present + version: 5.4.0 # <- Pins the version to install with pip. + + - name: Install some-package + ansible.builtin.package: + name: some-package + state: present # <- Ensures the package is installed. + + - name: Update Ansible with update_only to true + ansible.builtin.yum: + name: sudo + state: latest + update_only: true # <- Updates but does not install additional packages. +``` diff --git a/src/ansiblelint/rules/package_latest.py b/src/ansiblelint/rules/package_latest.py new file mode 100644 index 0000000..dd3074a --- /dev/null +++ b/src/ansiblelint/rules/package_latest.py @@ -0,0 +1,82 @@ +"""Implementations of the package-latest rule.""" +# Copyright (c) 2016 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class PackageIsNotLatestRule(AnsibleLintRule): + """Package installs should not use latest.""" + + id = "package-latest" + description = ( + "Package installs should use ``state=present`` with or without a version" + ) + severity = "VERY_LOW" + tags = ["idempotency"] + version_added = "historic" + + _package_managers = [ + # spell-checker: disable + "apk", + "apt", + "bower", + "bundler", + "dnf", + "easy_install", + "gem", + "homebrew", + "jenkins_plugin", + "npm", + "openbsd_package", + "openbsd_pkg", + "package", + "pacman", + "pear", + "pip", + "pkg5", + "pkgutil", + "portage", + "slackpkg", + "sorcery", + "swdepot", + "win_chocolatey", + "yarn", + "yum", + "zypper", + # spell-checker: enable + ] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + return ( + task["action"]["__ansible_module__"] in self._package_managers + and not task["action"].get("version") + and not task["action"].get("update_only") + and task["action"].get("state") == "latest" + ) diff --git a/src/ansiblelint/rules/partial_become.md b/src/ansiblelint/rules/partial_become.md new file mode 100644 index 0000000..01f9dae --- /dev/null +++ b/src/ansiblelint/rules/partial_become.md @@ -0,0 +1,42 @@ +# partial-become + +This rule checks that privilege escalation is activated when changing users. + +To perform an action as a different user with the `become_user` directive, you +must set `become: true`. + +!!! warning + + While Ansible inherits have of `become` and `become_user` from upper levels, + like play level or command line, we do not look at these values. This rule + requires you to be explicit and always define both in the same place, mainly + in order to prevent accidents when some tasks are moved from one location to + another one. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Start the httpd service as the apache user + ansible.builtin.service: + name: httpd + state: started + become_user: apache # <- Does not change the user because "become: true" is not set. +``` + +## Correct Code + +```yaml +- name: Example playbook + hosts: localhost + tasks: + - name: Start the httpd service as the apache user + ansible.builtin.service: + name: httpd + state: started + become: true # <- Activates privilege escalation. + become_user: apache # <- Changes the user with the desired privileges. +``` diff --git a/src/ansiblelint/rules/partial_become.py b/src/ansiblelint/rules/partial_become.py new file mode 100644 index 0000000..f3a3f72 --- /dev/null +++ b/src/ansiblelint/rules/partial_become.py @@ -0,0 +1,133 @@ +"""Implementation of partial-become rule.""" +# Copyright (c) 2016 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import annotations + +import sys +from functools import reduce +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + + +def _get_subtasks(data: dict[str, Any]) -> list[Any]: + result: list[Any] = [] + block_names = [ + "tasks", + "pre_tasks", + "post_tasks", + "handlers", + "block", + "always", + "rescue", + ] + for name in block_names: + if data and name in data: + result += data[name] or [] + return result + + +def _nested_search(term: str, data: dict[str, Any]) -> Any: + if data and term in data: + return True + return reduce( + (lambda x, y: x or _nested_search(term, y)), _get_subtasks(data), False + ) + + +def _become_user_without_become(becomeuserabove: bool, data: dict[str, Any]) -> Any: + if "become" in data: + # If become is in lineage of tree then correct + return False + if "become_user" in data and _nested_search("become", data): + # If 'become_user' on tree and become somewhere below + # we must check for a case of a second 'become_user' without a + # 'become' in its lineage + subtasks = _get_subtasks(data) + return reduce( + (lambda x, y: x or _become_user_without_become(False, y)), subtasks, False + ) + if _nested_search("become_user", data): + # Keep searching down if 'become_user' exists in the tree below current task + subtasks = _get_subtasks(data) + return len(subtasks) == 0 or reduce( + ( + lambda x, y: x + or _become_user_without_become( + becomeuserabove or "become_user" in data, y + ) + ), + subtasks, + False, + ) + # If at bottom of tree, flag up if 'become_user' existed in the lineage of the tree and + # 'become' was not. 
This is an error if any lineage has a 'become_user' but no become + return becomeuserabove + + +class BecomeUserWithoutBecomeRule(AnsibleLintRule): + """become_user requires become to work as expected.""" + + id = "partial-become" + description = "``become_user`` without ``become`` will not actually change user" + severity = "VERY_HIGH" + tags = ["unpredictability"] + version_added = "historic" + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + if file.kind == "playbook": + result = _become_user_without_become(False, data) + if result: + return [ + self.create_matcherror( + message=self.shortdesc, + filename=file, + linenumber=data[LINE_NUMBER_KEY], + ) + ] + return [] + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + def test_partial_become_positive() -> None: + """Positive test for partial-become.""" + collection = RulesCollection() + collection.register(BecomeUserWithoutBecomeRule()) + success = "examples/playbooks/rule-partial-become-without-become-pass.yml" + good_runner = Runner(success, rules=collection) + assert [] == good_runner.run() + + def test_partial_become_negative() -> None: + """Negative test for partial-become.""" + collection = RulesCollection() + collection.register(BecomeUserWithoutBecomeRule()) + failure = "examples/playbooks/rule-partial-become-without-become-fail.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 3 diff --git a/src/ansiblelint/rules/playbook_extension.md b/src/ansiblelint/rules/playbook_extension.md new file mode 100644 index 0000000..dd0e475 --- /dev/null +++ b/src/ansiblelint/rules/playbook_extension.md @@ -0,0 +1,14 @@ +# playbook-extension + +This rule checks the file extension for playbooks is either `.yml` or `.yaml`. +Ansible playbooks are expressed in YAML format with minimal syntax. + +The [YAML syntax](https://docs.ansible.com/ansible/latest/reference_appendices/YAMLSyntax.html#yaml-syntax) reference provides additional detail. + +## Problematic Code + +This rule is triggered if Ansible playbooks do not have a file extension or use an unsupported file extension such as `playbook.json` or `playbook.xml`. + +## Correct Code + +Save Ansible playbooks as valid YAML with the `.yml` or `.yaml` file extension. diff --git a/src/ansiblelint/rules/playbook_extension.py b/src/ansiblelint/rules/playbook_extension.py new file mode 100644 index 0000000..491b1fc --- /dev/null +++ b/src/ansiblelint/rules/playbook_extension.py @@ -0,0 +1,53 @@ +"""Implementation of playbook-extension rule.""" +# Copyright (c) 2016, Tsukinowa Inc. 
<info@tsukinowa.jp> +# Copyright (c) 2018, Ansible Project +from __future__ import annotations + +import os +import sys + +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.runner import Runner + + +class PlaybookExtensionRule(AnsibleLintRule): + """Use ".yml" or ".yaml" playbook extension.""" + + id = "playbook-extension" + description = 'Playbooks should have the ".yml" or ".yaml" extension' + severity = "MEDIUM" + tags = ["formatting"] + done: list[str] = [] + version_added = "v4.0.0" + + def matchyaml(self, file: Lintable) -> list[MatchError]: + result: list[MatchError] = [] + if file.kind != "playbook": + return result + path = str(file.path) + ext = os.path.splitext(path) + if ext[1] not in [".yml", ".yaml"] and path not in self.done: + self.done.append(path) + result.append(self.create_matcherror(filename=file)) + return result + + +if "pytest" in sys.modules: # noqa: C901 + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("file", "expected"), + (pytest.param("examples/playbooks/play-without-extension", 1, id="fail"),), + ) + def test_playbook_extension(file: str, expected: int) -> None: + """The ini_file module does not accept preserve mode.""" + rules = RulesCollection() + rules.register(PlaybookExtensionRule()) + results = Runner(Lintable(file, kind="playbook"), rules=rules).run() + assert len(results) == expected + for result in results: + assert result.tag == "playbook-extension" diff --git a/src/ansiblelint/rules/risky_file_permissions.md b/src/ansiblelint/rules/risky_file_permissions.md new file mode 100644 index 0000000..a04e12d --- /dev/null +++ b/src/ansiblelint/rules/risky_file_permissions.md @@ -0,0 +1,57 @@ +# risky-file-permissions + +This rule is triggered by various modules that could end up creating new files +on disk with permissions that might be too open, or unpredictable. Please read +the documentation of each module carefully to understand the implications of +using different argument values, as these make the difference between using the +module safely or not. The fix depends on each module and also your particular +situation. + +Some modules have a `create` argument that defaults to `true`. For those you +either need to set `create: false` or provide some permissions like `mode: 0600` +to make the behavior predictable and not dependent on the current system +settings. + +Modules that are checked: + +- [`ansible.builtin.assemble`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/assemble_module.html) +- [`ansible.builtin.copy`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/copy_module.html) +- [`ansible.builtin.file`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/file_module.html) +- [`ansible.builtin.get_url`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/get_url_module.html) +- [`ansible.builtin.replace`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/replace_module.html) +- [`ansible.builtin.template`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/template_module.html) +- [`community.general.archive`](https://docs.ansible.com/ansible/latest/collections/community/general/archive_module.html) +- [`community.general.ini_file`](https://docs.ansible.com/ansible/latest/collections/community/general/ini_file_module.html) + +!!! 
warning + + This rule does not take [module_defaults](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_module_defaults.html) configuration into account. + There are currently no plans to implement this feature because changing task location can also change task behavior. + +## Problematic code + +```yaml +--- +- name: Unsafe example of using ini_file + community.general.ini_file: + path: foo + create: true + mode: preserve +``` + +## Correct code + +```yaml +--- +- name: Safe example of using ini_file (1st solution) + community.general.ini_file: + path: foo + create: false # prevents creating a file with potentially insecure permissions + mode: preserve + +- name: Safe example of using ini_file (2nd solution) + community.general.ini_file: + path: foo + mode: "0600" # explicitly sets the desired permissions, to make the results predictable +``` diff --git a/src/ansiblelint/rules/risky_file_permissions.py b/src/ansiblelint/rules/risky_file_permissions.py new file mode 100644 index 0000000..b689315 --- /dev/null +++ b/src/ansiblelint/rules/risky_file_permissions.py @@ -0,0 +1,156 @@ +# Copyright (c) 2020 Sorin Sbarnea <sorin.sbarnea@gmail.com> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE.
+"""MissingFilePermissionsRule used with ansible-lint.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +# Despite documentation mentioning 'preserve' only these modules support it: +_modules_with_preserve = ( + "copy", + "template", +) + +_MODULES: set[str] = { + "archive", + "community.general.archive", + "assemble", + "ansible.builtin.assemble", + "copy", # supports preserve + "ansible.builtin.copy", + "file", + "ansible.builtin.file", + "get_url", + "ansible.builtin.get_url", + "replace", # implicit preserve behavior but mode: preserve is invalid + "ansible.builtin.replace", + "template", # supports preserve + "ansible.builtin.template", + # 'unarchive', # disabled because .tar.gz files can have permissions inside +} + +_MODULES_WITH_CREATE: dict[str, bool] = { + "blockinfile": False, + "ansible.builtin.blockinfile": False, + "htpasswd": True, + "community.general.htpasswd": True, + "ini_file": True, + "community.general.ini_file": True, + "lineinfile": False, + "ansible.builtin.lineinfile": False, +} + + +class MissingFilePermissionsRule(AnsibleLintRule): + """File permissions unset or incorrect.""" + + id = "risky-file-permissions" + description = ( + "Missing or unsupported mode parameter can cause unexpected file " + "permissions based " + "on version of Ansible being used. Be explicit, like `mode: 0644` to " + "avoid hitting this rule. Special `preserve` value is accepted " + f"only by {', '.join([f'`{x}`' for x in _modules_with_preserve])} modules." + ) + link = "https://github.com/ansible/ansible/issues/71200" + severity = "VERY_HIGH" + tags = ["unpredictability"] + version_added = "v4.3.0" + + _modules = _MODULES + _modules_with_create = _MODULES_WITH_CREATE + + # pylint: disable=too-many-return-statements + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + module = task["action"]["__ansible_module__"] + mode = task["action"].get("mode", None) + + if module not in self._modules and module not in self._modules_with_create: + return False + + if mode == "preserve" and module not in _modules_with_preserve: + return True + + if module in self._modules_with_create: + create = task["action"].get("create", self._modules_with_create[module]) + return create and mode is None + + # A file that doesn't exist cannot have a mode + if task["action"].get("state", None) == "absent": + return False + + # A symlink always has mode 0777 + if task["action"].get("state", None) == "link": + return False + + # Recurse on a directory does not allow for an uniform mode + if task["action"].get("recurse", None): + return False + + # The file module does not create anything when state==file (default) + if module == "file" and task["action"].get("state", "file") == "file": + return False + + # replace module is the only one that has a valid default preserve + # behavior, but we want to trigger rule if user used incorrect + # documentation and put 'preserve', which is not supported. 
+ if module == "replace" and mode is None: + return False + + return mode is None + + +if "pytest" in sys.modules: # noqa: C901 + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.testing import RunFromText # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("file", "expected"), + ( + pytest.param( + "examples/playbooks/rule-risky-file-permissions-pass.yml", 0, id="pass" + ), + pytest.param( + "examples/playbooks/rule-risky-file-permissions-fail.yml", + 11, + id="fails", + ), + ), + ) + def test_risky_file_permissions(file: str, expected: int) -> None: + """The ini_file module does not accept preserve mode.""" + collection = RulesCollection() + collection.register(MissingFilePermissionsRule()) + runner = RunFromText(collection) + results = runner.run(file) + assert len(results) == expected + for result in results: + assert result.tag == "risky-file-permissions" diff --git a/src/ansiblelint/rules/risky_octal.md b/src/ansiblelint/rules/risky_octal.md new file mode 100644 index 0000000..a2f22eb --- /dev/null +++ b/src/ansiblelint/rules/risky_octal.md @@ -0,0 +1,49 @@ +# risky-octal + +This rule checks that octal file permissions are strings that contain a leading +zero or are written in +[symbolic modes](https://www.gnu.org/software/findutils/manual/html_node/find_html/Symbolic-Modes.html), +such as `u+rwx` or `u=rw,g=r,o=r`. + +Using integers or octal values in YAML can result in unexpected behavior. For +example, the YAML loader interprets `0644` as the decimal number `420` but +putting `644` there will produce very different results. + +Modules that are checked: + +- [`ansible.builtin.assemble`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/assemble_module.html) +- [`ansible.builtin.copy`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/copy_module.html) +- [`ansible.builtin.file`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/file_module.html) +- [`ansible.builtin.replace`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/replace_module.html) +- [`ansible.builtin.template`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/template_module.html) + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Unsafe example of declaring Numeric file permissions + ansible.builtin.file: + path: /etc/foo.conf + owner: foo + group: foo + mode: 644 +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Safe example of declaring Numeric file permissions (1st solution) + ansible.builtin.file: + path: /etc/foo.conf + owner: foo + group: foo + mode: "0644" # <- quoting and the leading zero will prevent surprises + # "0o644" is also a valid alternative. 
+``` diff --git a/src/ansiblelint/rules/risky_octal.py b/src/ansiblelint/rules/risky_octal.py new file mode 100644 index 0000000..b0aadac --- /dev/null +++ b/src/ansiblelint/rules/risky_octal.py @@ -0,0 +1,193 @@ +"""Implementation of risky-octal rule.""" +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule, RulesCollection +from ansiblelint.runner import Runner + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class OctalPermissionsRule(AnsibleLintRule): + """Octal file permissions must contain leading zero or be a string.""" + + id = "risky-octal" + description = ( + "Numeric file permissions without leading zero can behave " + "in unexpected ways." + ) + link = "https://docs.ansible.com/ansible/latest/collections/ansible/builtin/file_module.html" + severity = "VERY_HIGH" + tags = ["formatting"] + version_added = "historic" + + _modules = [ + "assemble", + "copy", + "file", + "ini_file", + "lineinfile", + "replace", + "synchronize", + "template", + "unarchive", + ] + + @staticmethod + def is_invalid_permission(mode: int) -> bool: + """Check if permissions are valid. + + Sensible file permission modes don't have write bit set when read bit + is not set and don't have execute bit set when user execute bit is + not set. + + Also, user permissions are more generous than group permissions and + user and group permissions are more generous than world permissions. 
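+ For example, 0o644 passes these checks, while the decimal integer 644 (0o1204) does not.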
+ """ + other_write_without_read = ( + mode % 8 and mode % 8 < 4 and not (mode % 8 == 1 and (mode >> 6) % 2 == 1) + ) + group_write_without_read = ( + (mode >> 3) % 8 + and (mode >> 3) % 8 < 4 + and not ((mode >> 3) % 8 == 1 and (mode >> 6) % 2 == 1) + ) + user_write_without_read = ( + (mode >> 6) % 8 and (mode >> 6) % 8 < 4 and not (mode >> 6) % 8 == 1 + ) + other_more_generous_than_group = mode % 8 > (mode >> 3) % 8 + other_more_generous_than_user = mode % 8 > (mode >> 6) % 8 + group_more_generous_than_user = (mode >> 3) % 8 > (mode >> 6) % 8 + + return bool( + other_write_without_read + or group_write_without_read + or user_write_without_read + or other_more_generous_than_group + or other_more_generous_than_user + or group_more_generous_than_user + ) + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if task["action"]["__ansible_module__"] in self._modules: + mode = task["action"].get("mode", None) + + if isinstance(mode, str): + return False + + if isinstance(mode, int) and self.is_invalid_permission(mode): + return f'`mode: {mode}` should have a string value with leading zero `mode: "0{mode:o}"` or use symbolic mode.' + return False + + +if "pytest" in sys.modules: + import pytest + + VALID_MODES = [ + 0o777, + 0o775, + 0o770, + 0o755, + 0o750, + 0o711, + 0o710, + 0o700, + 0o666, + 0o664, + 0o660, + 0o644, + 0o640, + 0o600, + 0o555, + 0o551, + 0o550, + 0o511, + 0o510, + 0o500, + 0o444, + 0o440, + 0o400, + ] + + INVALID_MODES = [ + 777, + 775, + 770, + 755, + 750, + 711, + 710, + 700, + 666, + 664, + 660, + 644, + 640, + 622, + 620, + 600, + 555, + 551, + 550, # 511 == 0o777, 510 == 0o776, 500 == 0o764 + 444, + 440, + 400, + ] + + @pytest.mark.parametrize( + ("file", "failures"), + ( + pytest.param("examples/playbooks/rule-risky-octal-pass.yml", 0, id="pass"), + pytest.param("examples/playbooks/rule-risky-octal-fail.yml", 4, id="fail"), + ), + ) + def test_octal(file: str, failures: int) -> None: + """Test that octal permissions are valid.""" + collection = RulesCollection() + collection.register(OctalPermissionsRule()) + results = Runner(file, rules=collection).run() + + assert len(results) == failures + for result in results: + assert result.rule.id == "risky-octal" + + def test_octal_valid_modes() -> None: + """Test that octal modes are valid.""" + rule = OctalPermissionsRule() + for mode in VALID_MODES: + assert not rule.is_invalid_permission( + mode + ), f"0o{mode:o} should be a valid mode" + + def test_octal_invalid_modes() -> None: + """Test that octal modes are invalid.""" + rule = OctalPermissionsRule() + for mode in INVALID_MODES: + assert rule.is_invalid_permission( + mode + ), f"{mode:d} should be an invalid mode" diff --git a/src/ansiblelint/rules/risky_shell_pipe.md b/src/ansiblelint/rules/risky_shell_pipe.md new file mode 100644 index 0000000..0c222a9 --- /dev/null +++ b/src/ansiblelint/rules/risky_shell_pipe.md @@ -0,0 +1,35 @@ +# risky-shell-pipe + +This rule checks for the bash `pipefail` option with the Ansible `shell` module. + +You should always set `pipefail` when piping output from a command to another. +The return status of a pipeline is the exit status of the command. +The `pipefail` option ensures that tasks fail as expected if the first command fails. 
+ +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Pipeline without pipefail + shell: false | cat +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + become: no + tasks: + - name: Pipeline with pipefail + shell: set -o pipefail && false | cat + + - name: Pipeline with pipefail, multi-line + shell: | + set -o pipefail # <-- adding this will prevent surprises + false | cat +``` diff --git a/src/ansiblelint/rules/risky_shell_pipe.py b/src/ansiblelint/rules/risky_shell_pipe.py new file mode 100644 index 0000000..f766cf1 --- /dev/null +++ b/src/ansiblelint/rules/risky_shell_pipe.py @@ -0,0 +1,53 @@ +"""Implementation of risky-shell-pipe rule.""" +from __future__ import annotations + +import re +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.utils import convert_to_boolean + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class ShellWithoutPipefail(AnsibleLintRule): + """Shells that use pipes should set the pipefail option.""" + + id = "risky-shell-pipe" + description = ( + "Without the pipefail option set, a shell command that " + "implements a pipeline can fail and still return 0. If " + "any part of the pipeline other than the terminal command " + "fails, the whole pipeline will still return 0, which may " + "be considered a success by Ansible. " + "Pipefail is available in the bash shell." + ) + severity = "MEDIUM" + tags = ["command-shell"] + version_added = "v4.1.0" + + _pipefail_re = re.compile(r"^\s*set.*[+-][A-Za-z]*o\s*pipefail", re.M) + _pipe_re = re.compile(r"(?<!\|)\|(?!\|)") + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if task["__ansible_action_type__"] != "task": + return False + + if task["action"]["__ansible_module__"] != "shell": + return False + + if task.get("ignore_errors"): + return False + + jinja_stripped_cmd = self.unjinja( + " ".join(task["action"].get("__ansible_arguments__", [])) + ) + + return bool( + self._pipe_re.search(jinja_stripped_cmd) + and not self._pipefail_re.search(jinja_stripped_cmd) + and not convert_to_boolean(task["action"].get("ignore_errors", False)) + ) diff --git a/src/ansiblelint/rules/role_name.md b/src/ansiblelint/rules/role_name.md new file mode 100644 index 0000000..28aa8b8 --- /dev/null +++ b/src/ansiblelint/rules/role_name.md @@ -0,0 +1,36 @@ +# role-name + +This rule checks role names to ensure they conform with requirements. + +Role names must contain only lowercase alphanumeric characters and the underscore `_` character. +Role names must also start with an alphabetic character. + +For more information see the [roles directory](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections_structure.html#roles-directory) topic in Ansible documentation. + +`role-name[path]` message tells you to avoid using paths when importing roles. +You should only rely on Ansible's ability to find the role and refer to them +using fully qualified names. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + roles: + - 1myrole # <- Does not start with an alphabetic character. + - myrole2[*^ # <- Contains invalid special characters. + - myRole_3 # <- Contains uppercase alphabetic characters. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + roles: + - myrole1 # <- Starts with an alphabetic character. + - myrole2 # <- Contains only alphanumeric characters. 
+ - myrole_3 # <- Contains only lowercase alphabetic characters. +``` diff --git a/src/ansiblelint/rules/role_name.py b/src/ansiblelint/rules/role_name.py new file mode 100644 index 0000000..12989c8 --- /dev/null +++ b/src/ansiblelint/rules/role_name.py @@ -0,0 +1,160 @@ +"""Implementation of role-name rule.""" +# Copyright (c) 2020 Gael Chamoulaud <gchamoul@redhat.com> +# Copyright (c) 2020 Sorin Sbarnea <ssbarnea@redhat.com> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import annotations + +import re +import sys +from functools import cache +from pathlib import Path +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import ROLE_IMPORT_ACTION_NAMES +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.utils import parse_yaml_from_file + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + + +ROLE_NAME_REGEX = re.compile(r"^[a-z][a-z0-9_]*$") + + +def _remove_prefix(text: str, prefix: str) -> str: + return re.sub(rf"^{re.escape(prefix)}", "", text) + + +@cache +def _match_role_name_regex(role_name: str) -> bool: + return ROLE_NAME_REGEX.match(role_name) is not None + + +class RoleNames(AnsibleLintRule): + # Unable to use f-strings due to flake8 bug with AST parsing + """Role name {0} does not match ``^[a-z][a-z0-9_]*$`` pattern.""" + + id = "role-name" + description = ( + "Role names are now limited to contain only lowercase alphanumeric " + "characters, plus underline and start with an alpha character." + ) + link = "https://docs.ansible.com/ansible/devel/dev_guide/developing_collections_structure.html#roles-directory" + severity = "HIGH" + tags = ["deprecations", "metadata"] + version_added = "v6.8.5" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + results = [] + if task["action"]["__ansible_module__"] in ROLE_IMPORT_ACTION_NAMES: + name = task["action"].get("name", "") + if "/" in name: + results.append( + self.create_matcherror( + f"Avoid using paths when importing roles. 
({name})", + filename=file, + linenumber=task["action"].get("__line__", task["__line__"]), + tag=f"{self.id}[path]", + ) + ) + return results + + def matchdir(self, lintable: Lintable) -> list[MatchError]: + return self.matchyaml(lintable) + + def matchyaml(self, file: Lintable) -> list[MatchError]: + result: list[MatchError] = [] + + if file.kind not in ("meta", "role", "playbook"): + return result + + if file.kind == "playbook": + for play in file.data: + if "roles" in play: + line = play["__line__"] + for role in play["roles"]: + if isinstance(role, dict): + line = role["__line__"] + role_name = role["role"] + elif isinstance(role, str): + role_name = role + if "/" in role_name: + result.append( + self.create_matcherror( + f"Avoid using paths when importing roles. ({role_name})", + filename=file, + linenumber=line, + tag=f"{self.id}[path]", + ) + ) + return result + + if file.kind == "role": + role_name = self._infer_role_name( + meta=file.path / "meta" / "main.yml", default=file.path.name + ) + else: + role_name = self._infer_role_name( + meta=file.path, default=file.path.resolve().parents[1].name + ) + + role_name = _remove_prefix(role_name, "ansible-role-") + if role_name and not _match_role_name_regex(role_name): + result.append( + self.create_matcherror( + filename=file, + message=self.shortdesc.format(role_name), + ) + ) + return result + + @staticmethod + def _infer_role_name(meta: Path, default: str) -> str: + if meta.is_file(): + meta_data = parse_yaml_from_file(str(meta)) + if meta_data: + try: + return str(meta_data["galaxy_info"]["role_name"]) + except KeyError: + pass + return default + + +if "pytest" in sys.modules: + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("test_file", "failure"), + (pytest.param("examples/playbooks/rule-role-name-path.yml", 3, id="fail"),), + ) + def test_role_name_path( + default_rules_collection: RulesCollection, test_file: str, failure: int + ) -> None: + """Test rule matches.""" + results = Runner(test_file, rules=default_rules_collection).run() + for result in results: + assert result.tag == "role-name[path]" + assert len(results) == failure diff --git a/src/ansiblelint/rules/run_once.md b/src/ansiblelint/rules/run_once.md new file mode 100644 index 0000000..b7df02c --- /dev/null +++ b/src/ansiblelint/rules/run_once.md @@ -0,0 +1,63 @@ +# run-once + +This rule warns against the use of `run_once` when the `strategy` is set to +`free`. + +This rule can produce the following messages: + +- `run-once[play]`: Play uses `strategy: free`. +- `run-once[task]`: Using `run_once` may behave differently if the `strategy` is + set to `free`. + +For more information see the following topics in Ansible documentation: + +- [free strategy](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/free_strategy.html#free-strategy) +- [selecting a strategy](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_strategies.html#selecting-a-strategy) +- [run_once(playbook keyword) more info](https://docs.ansible.com/ansible/latest/reference_appendices/playbooks_keywords.html) + +!!! warning + + This rule will always trigger regardless of the value configured inside the 'strategy' field. That is because the effective value used at runtime can be different than the value inside the file. For example, ansible command line arguments can alter it. 
+ +It is perfectly fine to add `# noqa: run-once[task]` to mark the warning as +acknowledged and ignored. + +## Problematic Code + +```yaml +--- +- name: "Example with run_once" + hosts: all + strategy: free # <-- avoid use of strategy as free + gather_facts: false + tasks: + - name: Task with run_once + ansible.builtin.debug: + msg: "Test" + run_once: true # <-- avoid use of strategy as free at play level when using run_once at task level +``` + +## Correct Code + +```yaml +- name: "Example without run_once" + hosts: all + gather_facts: false + tasks: + - name: Task without run_once + ansible.builtin.debug: + msg: "Test" +``` + +```yaml +- name: "Example of using run_once with strategy other than free" + hosts: all + strategy: linear + # strategy: free # noqa: run-once[play] (if using strategy: free can skip it this way) + gather_facts: false + tasks: # <-- use noqa to disable rule violations for specific tasks + - name: Task with run_once # noqa: run-once[task] + ansible.builtin.debug: + msg: "Test" + run_once: true +``` diff --git a/src/ansiblelint/rules/run_once.py b/src/ansiblelint/rules/run_once.py new file mode 100644 index 0000000..f8a059e --- /dev/null +++ b/src/ansiblelint/rules/run_once.py @@ -0,0 +1,87 @@ +"""Optional Ansible-lint rule to warn use of run_once with strategy free.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.errors import MatchError +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class RunOnce(AnsibleLintRule): + """Run once should use strategy other than free.""" + + id = "run-once" + link = "https://docs.ansible.com/ansible/latest/reference_appendices/playbooks_keywords.html" + description = "When using run_once, we should avoid using strategy as free." 
+ + tags = ["idiom"] + severity = "MEDIUM" + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + """Return matches found for a specific playbook.""" + # If the Play uses the 'strategy' and it's value is set to free + + if not file or file.kind != "playbook" or not data: + return [] + + strategy = data.get("strategy", None) + run_once = data.get("run_once", False) + if (not strategy and not run_once) or strategy != "free": + return [] + return [ + self.create_matcherror( + message="Play uses strategy: free", + filename=file, + tag=f"{self.id}[play]", + # pylint: disable=protected-access + linenumber=strategy._line_number, + ) + ] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + """Return matches for a task.""" + if not file or file.kind != "playbook": + return [] + + run_once = task.get("run_once", False) + if not run_once: + return [] + return [ + self.create_matcherror( + message="Using run_once may behave differently if strategy is set to free.", + filename=file, + tag=f"{self.id}[task]", + linenumber=task[LINE_NUMBER_KEY], + ) + ] + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("test_file", "failure"), + ( + pytest.param("examples/playbooks/run-once-pass.yml", 0, id="pass"), + pytest.param("examples/playbooks/run-once-fail.yml", 2, id="fail"), + ), + ) + def test_run_once( + default_rules_collection: RulesCollection, test_file: str, failure: int + ) -> None: + """Test rule matches.""" + results = Runner(test_file, rules=default_rules_collection).run() + for result in results: + assert result.rule.id == RunOnce().id + assert len(results) == failure diff --git a/src/ansiblelint/rules/schema.md b/src/ansiblelint/rules/schema.md new file mode 100644 index 0000000..9327bca --- /dev/null +++ b/src/ansiblelint/rules/schema.md @@ -0,0 +1,80 @@ +# schema + +The `schema` rule validates Ansible metadata files against JSON schemas. These +schemas ensure the compatibility of Ansible syntax content across versions. + +This `schema` rule is **mandatory**. You cannot use inline `noqa` comments to +ignore it. + +Ansible-lint validates the `schema` rule before processing other rules. This +prevents unexpected syntax from triggering multiple rule violations. + +## Validated schema + +Ansible-lint currently validates several schemas that are maintained in separate +projects and updated independently to ansible-lint. + +> Report bugs related to schema in their respective repository and not in the +> ansible-lint project. 
+ +Maintained in the [ansible-lint](https://github.com/ansible/ansible-lint) +project: + +- `schema[ansible-lint-config]` validates + [ansible-lint configuration](https://github.com/ansible/ansible-lint/blob/main/src/ansiblelint/schemas/ansible-lint-config.json) + +Maintained in the +[ansible-navigator](https://github.com/ansible/ansible-navigator) project: + +- `schema[ansible-navigator]` validates + [ansible-navigator configuration](https://github.com/ansible/ansible-navigator/blob/main/src/ansible_navigator/data/ansible-navigator.json) + +- `schema[arg_specs]` validates + [module argument specs](https://docs.ansible.com/ansible/latest/dev_guide/developing_program_flow_modules.html#argument-spec) +- `schema[execution-environment]` validates + [execution environments](https://docs.ansible.com/automation-controller/latest/html/userguide/execution_environments.html) +- `schema[galaxy]` validates + [collection metadata](https://docs.ansible.com/ansible/latest/dev_guide/collections_galaxy_meta.html). +- `schema[inventory]` validates + [inventory files](https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html) + that match `inventory/*.yml`. +- `schema[meta-runtime]` validates + [runtime information](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections_structure.html#meta-directory-and-runtime-yml) + that matches `meta/runtime.yml` +- `schema[meta]` validates metadata for roles that match `meta/main.yml`. See + [role-dependencies](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_reuse_roles.html#role-dependencies) + or + [role/metadata.py](https://github.com/ansible/ansible/blob/devel/lib/ansible/playbook/role/metadata.py#L79)) + for details. +- `schema[playbook]` validates Ansible playbooks. +- `schema[requirements]` validates Ansible + [requirements](https://docs.ansible.com/ansible/latest/galaxy/user_guide.html#install-multiple-collections-with-a-requirements-file) + files that match `requirements.yml`. +- `schema[tasks]` validates Ansible task files that match `tasks/**/*.yml`. +- `schema[vars]` validates Ansible + [variables](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html) + that match `vars/*.yml` and `defaults/*.yml`. + +## schema[meta] + +For `meta/main.yml` files, Ansible-lint requires a `galaxy_info.standalone` +property that clarifies if a role is an old standalone one or a new one, +collection based: + +```yaml +galaxy_info: + standalone: true # <-- this is a standalone role (not part of a collection) +``` + +Ansible-lint requires the `standalone` key to avoid confusion and provide more +specific error messages. For example, the `meta` schema will require some +properties only for standalone roles or prevent the use of some properties that +are not supported by collections. + +You cannot use an empty `meta/main.yml` file or use only comments in the +`meta/main.yml` file. + +## schema[moves] + +These errors usually look like "foo was moved to bar in 2.10" and indicate +module moves between Ansible versions. 
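To make the `schema[moves]` pre-check concrete, here is a minimal sketch that scans a raw task mapping for the deprecated `with_*` lookup keys listed in the `pre_checks` table of `schema.py` (the next file in this diff). The helper function and the sample task are illustrative only.

```python
from __future__ import annotations

from typing import Any

# Keys covered by the "moves" pre-check, as listed in pre_checks in schema.py.
MOVED_LOOKUPS = ("with_flattened", "with_filetree", "with_cartesian")


def moved_lookup_keys(task: dict[str, Any]) -> list[str]:
    """Return the deprecated with_* keys present in a raw task mapping."""
    return [key for key in MOVED_LOOKUPS if key in task]


task = {
    "name": "Old style loop",
    "ansible.builtin.debug": {"msg": "{{ item }}"},
    "with_filetree": "templates/",
}
print(moved_lookup_keys(task))  # ['with_filetree'] -> reported as schema[moves]
```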
diff --git a/src/ansiblelint/rules/schema.py b/src/ansiblelint/rules/schema.py new file mode 100644 index 0000000..26ba1d9 --- /dev/null +++ b/src/ansiblelint/rules/schema.py @@ -0,0 +1,257 @@ +"""Rule definition for JSON Schema Validations.""" +from __future__ import annotations + +import logging +import sys +from typing import Any + +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.schemas import JSON_SCHEMAS, validate_file_schema + +_logger = logging.getLogger(__name__) + + +DESCRIPTION_MD = """ Returned errors will not include exact line numbers, but they will mention +the schema name being used as a tag, like ``schema[playbook]``, +``schema[tasks]``. + +This rule is not skippable and stops further processing of the file. + +If incorrect schema was picked, you might want to either: + +* move the file to standard location, so its file is detected correctly. +* use ``kinds:`` option in linter config to help it pick correct file type. +""" + +pre_checks = { + "task": { + "with_flattened": { + "msg": "with_flattened was moved to with_community.general.flattened in 2.10", + "tag": "moves", + }, + "with_filetree": { + "msg": "with_filetree was moved to with_community.general.flattened in 2.10", + "tag": "moves", + }, + "with_cartesian": { + "msg": "with_cartesian was moved to with_community.general.flattened in 2.10", + "tag": "moves", + }, + } +} + + +class ValidateSchemaRule(AnsibleLintRule): + """Perform JSON Schema Validation for known lintable kinds.""" + + description = DESCRIPTION_MD + + id = "schema" + severity = "VERY_HIGH" + tags = ["core"] + version_added = "v6.1.0" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str | MatchError | list[MatchError]: + result = [] + for key in pre_checks["task"]: + if key in task: + msg = pre_checks["task"][key]["msg"] + tag = pre_checks["task"][key]["tag"] + result.append( + MatchError( + message=msg, + filename=file, + rule=ValidateSchemaRule(), + details=ValidateSchemaRule.description, + tag=f"schema[{tag}]", + ) + ) + return result + + def matchyaml(self, file: Lintable) -> list[MatchError]: + """Return JSON validation errors found as a list of MatchError(s).""" + result = [] + if file.kind not in JSON_SCHEMAS: + return [] + + errors = validate_file_schema(file) + if errors: + if errors[0].startswith("Failed to load YAML file"): + _logger.debug( + "Ignored failure to load %s for schema validation, as !vault may cause it." 
+ ) + return [] + + result.append( + MatchError( + message=errors[0], + filename=file, + rule=ValidateSchemaRule(), + details=ValidateSchemaRule.description, + tag=f"schema[{file.kind}]", + ) + ) + return result + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + # pylint: disable=ungrouped-imports + from ansiblelint.config import options + from ansiblelint.rules import RulesCollection + from ansiblelint.runner import Runner + + @pytest.mark.parametrize( + ("file", "expected_kind", "expected"), + ( + ( + "examples/collection/galaxy.yml", + "galaxy", + ["'GPL' is not one of"], + ), + ( + "examples/roles/invalid_requirements_schema/meta/requirements.yml", + "requirements", + ["{'foo': 'bar'} is not valid under any of the given schemas"], + ), + ( + "examples/roles/invalid_meta_schema/meta/main.yml", + "meta", + ["False is not of type 'string'"], + ), + ( + "examples/playbooks/vars/invalid_vars_schema.yml", + "vars", + ["'123' does not match any of the regexes"], + ), + ( + "examples/execution-environment.yml", + "execution-environment", + [], + ), + ( + "examples/ee_broken/execution-environment.yml", + "execution-environment", + ["Additional properties are not allowed ('foo' was unexpected)"], + ), + ("examples/meta/runtime.yml", "meta-runtime", []), + ( + "examples/broken_collection_meta_runtime/meta/runtime.yml", + "meta-runtime", + ["Additional properties are not allowed ('foo' was unexpected)"], + ), + ( + "examples/inventory/production.yml", + "inventory", + [], + ), + ( + "examples/inventory/broken_dev_inventory.yml", + "inventory", + ["Additional properties are not allowed ('foo' was unexpected)"], + ), + ( + ".ansible-lint", + "ansible-lint-config", + [], + ), + ( + "examples/.config/ansible-lint.yml", + "ansible-lint-config", + [], + ), + ( + "examples/broken/.ansible-lint", + "ansible-lint-config", + ["Additional properties are not allowed ('foo' was unexpected)"], + ), + ( + "examples/ansible-navigator.yml", + "ansible-navigator-config", + [], + ), + ( + "examples/broken/ansible-navigator.yml", + "ansible-navigator-config", + ["Additional properties are not allowed ('ansible' was unexpected)"], + ), + ( + "examples/roles/hello/meta/argument_specs.yml", + "arg_specs", + [], + ), + ( + "examples/roles/broken_argument_specs/meta/argument_specs.yml", + "arg_specs", + ["Additional properties are not allowed ('foo' was unexpected)"], + ), + ), + ids=( + # "playbook-fail", + "galaxy", + "requirements", + "meta", + "vars", + "ee", + "ee-broken", + "meta-runtime", + "meta-runtime-broken", + "inventory", + "inventory-broken", + "lint-config", + "lint-config2", + "lint-config-broken", + "navigator", + "navigator-broken", + "argspecs", + "argspecs-broken", + ), + ) + def test_schema(file: str, expected_kind: str, expected: list[str]) -> None: + """Validate parsing of ansible output.""" + lintable = Lintable(file) + assert lintable.kind == expected_kind + + rules = RulesCollection(options=options) + rules.register(ValidateSchemaRule()) + results = Runner(lintable, rules=rules).run() + + assert len(results) == len(expected), results + for idx, result in enumerate(results): + assert result.filename.endswith(file) + assert expected[idx] in result.message + assert result.tag == f"schema[{expected_kind}]" + + @pytest.mark.parametrize( + ("file", "expected_kind", "expected_tag", "count"), + ( + pytest.param( + "examples/playbooks/rule-syntax-moves.yml", + "playbook", + "schema[moves]", + 3, + id="playbook", + ), + ), + ) + 
def test_schema_moves( + file: str, expected_kind: str, expected_tag: str, count: int + ) -> None: + """Validate ability to detect schema[moves].""" + lintable = Lintable(file) + assert lintable.kind == expected_kind + + rules = RulesCollection(options=options) + rules.register(ValidateSchemaRule()) + results = Runner(lintable, rules=rules).run() + + assert len(results) == count, results + for result in results: + assert result.filename.endswith(file) + assert result.tag == expected_tag diff --git a/src/ansiblelint/rules/syntax_check.md b/src/ansiblelint/rules/syntax_check.md new file mode 100644 index 0000000..e8197a5 --- /dev/null +++ b/src/ansiblelint/rules/syntax_check.md @@ -0,0 +1,45 @@ +# syntax-check + +Our linter runs `ansible-playbook --syntax-check` on all playbooks, and if any +of these reports a syntax error, this stops any further processing of these +files. + +This error **cannot be disabled** due to being a prerequisite for other steps. +You can exclude these files from linting, but it is better to make sure they can +be loaded by Ansible. This is often achieved by editing the inventory file +and/or `ansible.cfg` so ansible can load required variables. + +If undefined variables cause the failure, you can use the jinja `default()` +filter to provide fallback values, like in the example below. + +This rule is among the few `unskippable` rules that cannot be added to +`skip_list` or `warn_list`. One possible workaround is to add the entire file to +the `exclude_paths`. This is a valid approach for special cases, like testing +fixtures that are invalid on purpose. + +One of the most common sources of errors is a failure to assert the presence of +various variables at the beginning of the playbook. + +This rule can produce messages like below: + +- `syntax-check[empty-playbook]` is raised when a playbook file has no content. + +## Problematic code + +```yaml +--- +- name: + Bad use of variable inside hosts block (wrong assumption of it being + defined) + hosts: "{{ my_hosts }}" + tasks: [] +``` + +## Correct code + +```yaml +--- +- name: Good use of variable inside hosts, without assumptions + hosts: "{{ my_hosts | default([]) }}" + tasks: [] +``` diff --git a/src/ansiblelint/rules/syntax_check.py b/src/ansiblelint/rules/syntax_check.py new file mode 100644 index 0000000..cec94e6 --- /dev/null +++ b/src/ansiblelint/rules/syntax_check.py @@ -0,0 +1,240 @@ +"""Rule definition for ansible syntax check.""" +from __future__ import annotations + +import json +import re +import subprocess +import sys +from dataclasses import dataclass +from typing import Any + +from ansiblelint._internal.rules import BaseRule, RuntimeErrorRule +from ansiblelint.app import get_app +from ansiblelint.config import options +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.logger import timed_info +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.text import strip_ansi_escape + + +@dataclass +class KnownError: + """Class that tracks result of linting.""" + + tag: str + regex: re.Pattern[str] + + +OUTPUT_PATTERNS = ( + KnownError( + tag="missing-file", + regex=re.compile( + # do not use <filename> capture group for this because we want to report original file, not the missing target one + r"(?P<title>Unable to retrieve file contents)\n(?P<details>Could not find or access '(?P<value>.*)'[^\n]*)", + re.MULTILINE | re.S | re.DOTALL, + ), + ), + KnownError( + tag="specific", + regex=re.compile( + r"^ERROR! 
(?P<title>[^\n]*)\n\nThe error appears to be in '(?P<filename>[\w\/\.\-]+)': line (?P<line>\d+), column (?P<column>\d+)", + re.MULTILINE | re.S | re.DOTALL, + ), + ), + KnownError( + tag="empty-playbook", + regex=re.compile( + "Empty playbook, nothing to do", re.MULTILINE | re.S | re.DOTALL + ), + ), + KnownError( + tag="malformed", + regex=re.compile( + "^ERROR! (?P<title>A malformed block was encountered while loading a block[^\n]*)", + re.MULTILINE | re.S | re.DOTALL, + ), + ), +) + + +class AnsibleSyntaxCheckRule(AnsibleLintRule): + """Ansible syntax check failed.""" + + id = "syntax-check" + severity = "VERY_HIGH" + tags = ["core", "unskippable"] + version_added = "v5.0.0" + _order = 0 + + @staticmethod + # pylint: disable=too-many-locals,too-many-branches + def _get_ansible_syntax_check_matches(lintable: Lintable) -> list[MatchError]: + """Run ansible syntax check and return a list of MatchError(s).""" + default_rule: BaseRule = AnsibleSyntaxCheckRule() + results = [] + if lintable.kind not in ("playbook", "role"): + return [] + + with timed_info( + "Executing syntax check on %s %s", lintable.kind, lintable.path + ): + # To avoid noisy warnings we pass localhost as current inventory: + # [WARNING]: No inventory was parsed, only implicit localhost is available + # [WARNING]: provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all' + if lintable.kind == "playbook": + cmd = [ + "ansible-playbook", + "-i", + "localhost,", + "--syntax-check", + str(lintable.path.expanduser()), + ] + else: # role + cmd = [ + "ansible", + "localhost", + "--syntax-check", + "--module-name=include_role", + "--args", + f"name={str(lintable.path.expanduser())}", + ] + if options.extra_vars: + cmd.extend(["--extra-vars", json.dumps(options.extra_vars)]) + + # To reduce noisy warnings like + # CryptographyDeprecationWarning: Blowfish has been deprecated + # https://github.com/paramiko/paramiko/issues/2038 + env = get_app().runtime.environ.copy() + env["PYTHONWARNINGS"] = "ignore" + + run = subprocess.run( + cmd, + stdin=subprocess.PIPE, + capture_output=True, + shell=False, # needed when command is a list + text=True, + check=False, + env=env, + ) + + if run.returncode != 0: + message = None + filename = lintable + linenumber = 1 + column = None + tag = None + + stderr = strip_ansi_escape(run.stderr) + stdout = strip_ansi_escape(run.stdout) + if stderr: + details = stderr + if stdout: + details += "\n" + stdout + else: + details = stdout + + for pattern in OUTPUT_PATTERNS: + rule = default_rule + match = re.search(pattern.regex, stderr) + if match: + groups = match.groupdict() + title = groups.get("title", match.group(0)) + details = groups.get("details", "") + linenumber = int(groups.get("line", 1)) + + if "filename" in groups: + filename = Lintable(groups["filename"]) + else: + filename = lintable + column = int(groups.get("column", 1)) + results.append( + MatchError( + message=title, + filename=filename, + linenumber=linenumber, + column=column, + rule=rule, + details=details, + tag=f"{rule.id}[{pattern.tag}]", + ) + ) + + if not results: + rule = RuntimeErrorRule() + message = ( + f"Unexpected error code {run.returncode} from " + f"execution of: {' '.join(cmd)}" + ) + results.append( + MatchError( + message=message, + filename=filename, + linenumber=linenumber, + column=column, + rule=rule, + details=details, + tag=tag, + ) + ) + + return results + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: 
+ + def test_get_ansible_syntax_check_matches() -> None: + """Validate parsing of ansible output.""" + lintable = Lintable( + "examples/playbooks/conflicting_action.yml", kind="playbook" + ) + # pylint: disable=protected-access + result = AnsibleSyntaxCheckRule._get_ansible_syntax_check_matches(lintable) + assert result[0].linenumber == 4 + assert result[0].column == 7 + assert ( + result[0].message + == "conflicting action statements: ansible.builtin.debug, ansible.builtin.command" + ) + # We internally convert absolute paths returned by ansible into paths + # relative to current directory. + assert result[0].filename.endswith("/conflicting_action.yml") + assert len(result) == 1 + + def test_empty_playbook() -> None: + """Validate detection of empty-playbook.""" + lintable = Lintable("examples/playbooks/empty_playbook.yml", kind="playbook") + # pylint: disable=protected-access + result = AnsibleSyntaxCheckRule._get_ansible_syntax_check_matches(lintable) + assert result[0].linenumber == 1 + # We internally convert absolute paths returned by ansible into paths + # relative to current directory. + assert result[0].filename.endswith("/empty_playbook.yml") + assert result[0].tag == "syntax-check[empty-playbook]" + assert result[0].message == "Empty playbook, nothing to do" + assert len(result) == 1 + + def test_extra_vars_passed_to_command(config_options: Any) -> None: + """Validate `extra-vars` are passed to syntax check command.""" + config_options.extra_vars = { + "foo": "bar", + "complex_variable": ":{;\t$()", + } + lintable = Lintable("examples/playbooks/extra_vars.yml", kind="playbook") + + # pylint: disable=protected-access + result = AnsibleSyntaxCheckRule._get_ansible_syntax_check_matches(lintable) + + assert not result + + def test_syntax_check_role() -> None: + """Validate syntax check of a broken role.""" + lintable = Lintable("examples/playbooks/roles/invalid_due_syntax", kind="role") + # pylint: disable=protected-access + result = AnsibleSyntaxCheckRule._get_ansible_syntax_check_matches(lintable) + assert len(result) == 1, result + assert result[0].linenumber == 2 + assert result[0].filename == "examples/roles/invalid_due_syntax/tasks/main.yml" + assert result[0].tag == "syntax-check[specific]" + assert result[0].message == "no module/action detected in task." diff --git a/src/ansiblelint/rules/var_naming.md b/src/ansiblelint/rules/var_naming.md new file mode 100644 index 0000000..60c069d --- /dev/null +++ b/src/ansiblelint/rules/var_naming.md @@ -0,0 +1,47 @@ +# var-naming + +This rule checks variable names to ensure they conform with requirements. + +Variable names must contain only lowercase alphanumeric characters and the +underscore `_` character. Variable names must also start with either an +alphabetic or underscore `_` character. + +For more information see the [creating valid variable names][var-names] topic in +Ansible documentation and [Naming things (Good Practices for Ansible)][cop]. + +## Settings + +This rule behavior can be changed by altering the below settings: + +```yaml +# .ansible-lint +var_naming_pattern: "^[a-z_][a-z0-9_]*$" +``` + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + vars: + CamelCase: true # <- Contains a mix of lowercase and uppercase characters. + ALL_CAPS: bar # <- Contains only uppercase characters. + v@r!able: baz # <- Contains special characters. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + vars: + lowercase: true # <- Contains only lowercase characters. 
+ no_caps: bar # <- Does not contains uppercase characters. + variable: baz # <- Does not contain special characters. +``` + +[cop]: https://redhat-cop.github.io/automation-good-practices/#_naming_things +[var-names]: + https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#creating-valid-variable-names diff --git a/src/ansiblelint/rules/var_naming.py b/src/ansiblelint/rules/var_naming.py new file mode 100644 index 0000000..945a95d --- /dev/null +++ b/src/ansiblelint/rules/var_naming.py @@ -0,0 +1,242 @@ +"""Implementation of var-naming rule.""" +from __future__ import annotations + +import keyword +import re +import sys +from typing import TYPE_CHECKING, Any + +from ansible.parsing.yaml.objects import AnsibleUnicode + +from ansiblelint.config import options +from ansiblelint.constants import LINE_NUMBER_KEY, SUCCESS_RC +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule, RulesCollection +from ansiblelint.runner import Runner +from ansiblelint.skip_utils import get_rule_skips_from_line +from ansiblelint.utils import parse_yaml_from_file + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + +# Should raise var-naming at line [2, 6]. +FAIL_VARS = """--- +CamelCaseIsBad: false # invalid +this_is_valid: # valid because content is a dict, not a variable + CamelCase: ... + ALL_CAPS: ... +ALL_CAPS_ARE_BAD_TOO: ... # invalid +"{{ 'test_' }}var": "value" # valid +CamelCaseButErrorIgnored: true # noqa: var-naming +""" + + +# properties/parameters are prefixed and postfixed with `__` +def is_property(k: str) -> bool: + """Check if key is a property.""" + return k.startswith("__") and k.endswith("__") + + +class VariableNamingRule(AnsibleLintRule): + """All variables should be named using only lowercase and underscores.""" + + id = "var-naming" + severity = "MEDIUM" + tags = ["idiom"] + version_added = "v5.0.10" + needs_raw_task = True + re_pattern = re.compile(options.var_naming_pattern or "^[a-z_][a-z0-9_]*$") + + def is_invalid_variable_name(self, ident: str) -> bool: + """Check if variable name is using right pattern.""" + # Based on https://github.com/ansible/ansible/blob/devel/lib/ansible/utils/vars.py#L235 + if not isinstance(ident, str): + return False + + try: + ident.encode("ascii") + except UnicodeEncodeError: + return False + + if keyword.iskeyword(ident): + return False + + # We want to allow use of jinja2 templating for variable names + if "{{" in ident: + return False + + # previous tests should not be triggered as they would have raised a + # syntax-error when we loaded the files but we keep them here as a + # safety measure. 
+ return not bool(self.re_pattern.match(ident)) + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + """Return matches found for a specific playbook.""" + results: list[MatchError] = [] + raw_results: list[MatchError] = [] + + if not data or file.kind not in ("tasks", "handlers", "playbook", "vars"): + return results + # If the Play uses the 'vars' section to set variables + our_vars = data.get("vars", {}) + for key in our_vars.keys(): + if self.is_invalid_variable_name(key): + raw_results.append( + self.create_matcherror( + filename=file, + linenumber=key.ansible_pos[1] + if isinstance(key, AnsibleUnicode) + else our_vars[LINE_NUMBER_KEY], + message="Play defines variable '" + + key + + "' within 'vars' section that violates variable naming standards", + tag=f"var-naming[{key}]", + ) + ) + if raw_results: + lines = file.content.splitlines() + for match in raw_results: + # linenumber starts with 1, not zero + skip_list = get_rule_skips_from_line(lines[match.linenumber - 1]) + if match.rule.id not in skip_list and match.tag not in skip_list: + results.append(match) + + return results + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + """Return matches for task based variables.""" + results = [] + # If the task uses the 'vars' section to set variables + our_vars = task.get("vars", {}) + for key in our_vars.keys(): + if self.is_invalid_variable_name(key): + results.append( + self.create_matcherror( + filename=file, + linenumber=our_vars[LINE_NUMBER_KEY], + message=f"Task defines variable within 'vars' section that violates variable naming standards: {key}", + tag=f"var-naming[{key}]", + ) + ) + + # If the task uses the 'set_fact' module + ansible_module = task["action"]["__ansible_module__"] + if ansible_module == "set_fact": + for key in filter( + lambda x: isinstance(x, str) and not x.startswith("__"), + task["action"].keys(), + ): + if self.is_invalid_variable_name(key): + results.append( + self.create_matcherror( + filename=file, + linenumber=task["action"][LINE_NUMBER_KEY], + message=f"Task uses 'set_fact' to define variables that violates variable naming standards: {key}", + tag=f"var-naming[{key}]", + ) + ) + + # If the task registers a variable + registered_var = task.get("register", None) + if registered_var and self.is_invalid_variable_name(registered_var): + results.append( + self.create_matcherror( + filename=file, + linenumber=task[LINE_NUMBER_KEY], + message=f"Task registers a variable that violates variable naming standards: {registered_var}", + tag=f"var-naming[{registered_var}]", + ) + ) + + return results + + def matchyaml(self, file: Lintable) -> list[MatchError]: + """Return matches for variables defined in vars files.""" + results: list[MatchError] = [] + raw_results: list[MatchError] = [] + meta_data: dict[AnsibleUnicode, Any] = {} + + if str(file.kind) == "vars" and file.data: + meta_data = parse_yaml_from_file(str(file.path)) + for key in meta_data.keys(): + if self.is_invalid_variable_name(key): + raw_results.append( + self.create_matcherror( + filename=file, + linenumber=key.ansible_pos[1], + message="File defines variable '" + + key + + "' that violates variable naming standards", + ) + ) + if raw_results: + lines = file.content.splitlines() + for match in raw_results: + # linenumber starts with 1, not zero + skip_list = get_rule_skips_from_line(lines[match.linenumber - 1]) + if match.rule.id not in skip_list and match.tag not in skip_list: + results.append(match) + else: + 
results.extend(super().matchyaml(file)) + return results + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.testing import ( # pylint: disable=ungrouped-imports + RunFromText, + run_ansible_lint, + ) + + @pytest.mark.parametrize( + ("file", "expected"), + ( + pytest.param("examples/playbooks/rule-var-naming-fail.yml", 7, id="0"), + pytest.param("examples/Taskfile.yml", 0, id="1"), + ), + ) + def test_invalid_var_name_playbook(file: str, expected: int) -> None: + """Test rule matches.""" + rules = RulesCollection(options=options) + rules.register(VariableNamingRule()) + results = Runner(Lintable(file), rules=rules).run() + # results = rule_runner.run() + assert len(results) == expected + for result in results: + assert result.rule.id == VariableNamingRule.id + # We are not checking line numbers because they can vary between + # different versions of ruamel.yaml (and depending on presence/absence + # of its c-extension) + + @pytest.mark.parametrize( + "rule_runner", (VariableNamingRule,), indirect=["rule_runner"] + ) + def test_invalid_var_name_varsfile(rule_runner: RunFromText) -> None: + """Test rule matches.""" + results = rule_runner.run_role_defaults_main(FAIL_VARS) + assert len(results) == 2 + for result in results: + assert result.rule.id == VariableNamingRule.id + + # list unexpected error lines or non-matching error lines + expected_error_lines = [2, 6] + lines = [i.linenumber for i in results] + error_lines_difference = list( + set(expected_error_lines).symmetric_difference(set(lines)) + ) + assert len(error_lines_difference) == 0 + + def test_var_naming_with_pattern() -> None: + """Test rule matches.""" + role_path = "examples/roles/var_naming_pattern/tasks/main.yml" + conf_path = "examples/roles/var_naming_pattern/.ansible-lint" + result = run_ansible_lint( + f"--config-file={conf_path}", + role_path, + ) + assert result.returncode == SUCCESS_RC + assert "var-naming" not in result.stdout diff --git a/src/ansiblelint/rules/yaml.md b/src/ansiblelint/rules/yaml.md new file mode 100644 index 0000000..03055d0 --- /dev/null +++ b/src/ansiblelint/rules/yaml.md @@ -0,0 +1,92 @@ +# yaml + +This rule checks YAML syntax and is an implementation of `yamllint`. + +You can disable YAML syntax violations by adding `yaml` to the `skip_list` in +your Ansible-lint configuration as follows: + +```yaml +skip_list: + - yaml +``` + +For more fine-grained control, disable violations for specific rules using tag +identifiers in the `yaml[yamllint_rule]` format as follows: + +```yaml +skip_list: + - yaml[trailing-spaces] + - yaml[indentation] +``` + +If you want Ansible-lint to report YAML syntax violations as warnings, and not +fatal errors, add tag identifiers to the `warn_list` in your configuration, for +example: + +```yaml +warn_list: + - yaml[document-start] +``` + +See the +[list of yamllint rules](https://yamllint.readthedocs.io/en/stable/rules.html) +for more information. 
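The sketch below shows how a raw yamllint run maps onto these `yaml[<rule>]` tags; it mirrors the way `yaml_rule.py` (further down in this diff) wraps `yamllint.linter.run`, except that it uses yamllint's stock `extends: default` configuration instead of the ruleset ansible-lint actually bundles, so treat it as an approximation.

```python
from yamllint.config import YamlLintConfig
from yamllint.linter import run as run_yamllint

conf = YamlLintConfig("extends: default")  # ansible-lint ships its own defaults instead
document = "foo: 1\nfoo: 2   \n"           # duplicate key, trailing spaces, no '---'

for problem in run_yamllint(document, conf):
    # ansible-lint reports each finding under a tag of the form yaml[<rule>]
    print(f"yaml[{problem.rule}] line {problem.line}: {problem.desc}")
```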
+
+Some of the detailed error codes that you might see are:
+
+- `yaml[brackets]` - _too few spaces inside empty brackets_, or _too many spaces
+  inside brackets_
+- `yaml[colons]` - _too many spaces before colon_, or _too many spaces after
+  colon_
+- `yaml[commas]` - _too many spaces before comma_, or _too few spaces after
+  comma_
+- `yaml[comments-indentation]` - _Comment not indented like content_
+- `yaml[comments]` - _Too few spaces before comment_, or _Missing starting space
+  in comment_
+- `yaml[document-start]` - _missing document start "---"_ or _found forbidden
+  document start "---"_
+- `yaml[empty-lines]` - _too many blank lines (...> ...)_
+- `yaml[indentation]` - _Wrong indentation: expected ... but found ..._
+- `yaml[key-duplicates]` - _Duplication of key "..." in mapping_
+- `yaml[new-line-at-end-of-file]` - _No new line character at the end of file_
+- `yaml[octal-values]` - forbidden implicit or explicit [octal](#octals) value
+- `yaml[syntax]` - YAML syntax is broken
+- `yaml[trailing-spaces]` - Spaces are found at the end of lines
+- `yaml[truthy]` - _Truthy value should be one of ..._
+
+## Octals
+
+As the [YAML specification] regarding octal values changed at least three times,
+in [1.1], [1.2.0] and [1.2.2], we now require users to always add quotes around
+octal values so that every YAML loader reads them as strings, providing
+consistent behavior. This is also safer, as JSON does not support octal values
+at all.
+
+By default, yamllint does not check for octals, but our custom default ruleset
+for it does. If for some reason you do not want to follow our defaults, you can
+create a `.yamllint` file in your project and it will take precedence over our
+defaults.
+
+## Problematic code
+
+```yaml
+# Missing YAML document start.
+foo: 0777 # <-- yaml[octal-values]
+foo2: 0o777 # <-- yaml[octal-values]
+foo2: ... # <-- yaml[key-duplicates]
+bar: ... # <-- yaml[comments-indentation]
+```
+
+## Correct code
+
+```yaml
+---
+foo: "0777" # <-- Explicitly quoting octal is less risky.
+foo2: "0o777" # <-- Explicitly quoting octal is less risky.
+bar: ... # Correct comment indentation.
+``` + +[1.1]: https://yaml.org/spec/1.1/ +[1.2.0]: https://yaml.org/spec/1.2.0/ +[1.2.2]: https://yaml.org/spec/1.2.2/ +[yaml specification]: https://yaml.org/ diff --git a/src/ansiblelint/rules/yaml_rule.py b/src/ansiblelint/rules/yaml_rule.py new file mode 100644 index 0000000..d731fc0 --- /dev/null +++ b/src/ansiblelint/rules/yaml_rule.py @@ -0,0 +1,179 @@ +"""Implementation of yaml linting rule (yamllint integration).""" +from __future__ import annotations + +import logging +import sys +from typing import TYPE_CHECKING, Iterable + +from yamllint.linter import run as run_yamllint + +from ansiblelint.constants import LINE_NUMBER_KEY, SKIPPED_RULES_KEY +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.yaml_utils import load_yamllint_config + +if TYPE_CHECKING: + from typing import Any, Generator + + from ansiblelint.errors import MatchError + +_logger = logging.getLogger(__name__) + + +class YamllintRule(AnsibleLintRule): + """Violations reported by yamllint.""" + + id = "yaml" + severity = "VERY_LOW" + tags = ["formatting", "yaml"] + version_added = "v5.0.0" + config = load_yamllint_config() + has_dynamic_tags = True + link = "https://yamllint.readthedocs.io/en/stable/rules.html" + # ensure this rule runs before most of other common rules + _order = 1 + + def matchyaml(self, file: Lintable) -> list[MatchError]: + """Return matches found for a specific YAML text.""" + matches: list[MatchError] = [] + filtered_matches: list[MatchError] = [] + if str(file.base_kind) != "text/yaml": + return matches + + for problem in run_yamllint( + file.content, YamllintRule.config, filepath=file.path + ): + self.severity = "VERY_LOW" + if problem.level == "error": + self.severity = "MEDIUM" + if problem.desc.endswith("(syntax)"): + self.severity = "VERY_HIGH" + matches.append( + self.create_matcherror( + # yamllint does return lower-case sentences + message=problem.desc.capitalize(), + linenumber=problem.line, + details="", + filename=file, + tag=f"yaml[{problem.rule}]", + ) + ) + + # Now we save inside the file the skips, so they can be removed later, + # especially as these skips can be about other rules than yaml one. 
+ _fetch_skips(file.data, file.line_skips) + + for match in matches: + last_skips = set() + + for line, skips in file.line_skips.items(): + if line > match.linenumber: + break + last_skips = skips + if last_skips.intersection({"skip_ansible_lint", match.rule.id, match.tag}): + continue + filtered_matches.append(match) + + return filtered_matches + + +def _combine_skip_rules(data: Any) -> set[str]: + """Return a consolidated list of skipped rules.""" + result = set(data.get(SKIPPED_RULES_KEY, [])) + tags = data.get("tags", []) + if tags and ( + isinstance(tags, Iterable) + and "skip_ansible_lint" in tags + or tags == "skip_ansible_lint" + ): + result.add("skip_ansible_lint") + return result + + +def _fetch_skips(data: Any, collector: dict[int, set[str]]) -> dict[int, set[str]]: + """Retrieve a dictionary with line: skips by looking recursively in given JSON structure.""" + if hasattr(data, "get") and data.get(LINE_NUMBER_KEY): + rules = _combine_skip_rules(data) + if rules: + collector[data.get(LINE_NUMBER_KEY)].update(rules) + if isinstance(data, Iterable) and not isinstance(data, str): + if isinstance(data, dict): + for entry, value in data.items(): + _fetch_skips(value, collector) + else: # must be some kind of list + for entry in data: + if ( + entry + and hasattr(data, "get") + and LINE_NUMBER_KEY in entry + and SKIPPED_RULES_KEY in entry + and entry[SKIPPED_RULES_KEY] + ): + collector[entry[LINE_NUMBER_KEY]].update(entry[SKIPPED_RULES_KEY]) + _fetch_skips(entry, collector) + return collector + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + # pylint: disable=ungrouped-imports + from ansiblelint.config import options + from ansiblelint.rules import RulesCollection + from ansiblelint.runner import Runner + + @pytest.mark.parametrize( + ("file", "expected_kind", "expected"), + ( + ( + "examples/yamllint/invalid.yml", + "yaml", + [ + 'Missing document start "---"', + 'Duplication of key "foo" in mapping', + "Trailing spaces", + ], + ), + ( + "examples/yamllint/valid.yml", + "yaml", + [], + ), + ( + "examples/yamllint/multi-document.yaml", + "yaml", + [], + ), + ), + ids=( + "invalid", + "valid", + "multi-document", + ), + ) + def test_yamllint(file: str, expected_kind: str, expected: list[str]) -> None: + """Validate parsing of ansible output.""" + lintable = Lintable(file) + assert lintable.kind == expected_kind + + rules = RulesCollection(options=options) + rules.register(YamllintRule()) + results = Runner(lintable, rules=rules).run() + + assert len(results) == len(expected), results + for idx, result in enumerate(results): + assert result.filename.endswith(file) + assert expected[idx] in result.message + assert isinstance(result.tag, str) + assert result.tag.startswith("yaml[") + + def test_yamllint_has_help(default_rules_collection: RulesCollection) -> None: + """Asserts that we loaded markdown documentation in help property.""" + for collection in default_rules_collection: + if collection.id == "yaml": + assert collection.help is not None + assert len(collection.help) > 100 + break + else: + pytest.fail("No yaml collection found") diff --git a/src/ansiblelint/runner.py b/src/ansiblelint/runner.py new file mode 100644 index 0000000..a98fe51 --- /dev/null +++ b/src/ansiblelint/runner.py @@ -0,0 +1,249 @@ +"""Runner implementation.""" +from __future__ import annotations + +import logging +import multiprocessing +import multiprocessing.pool +import os +from dataclasses import dataclass +from fnmatch import 
fnmatch +from typing import TYPE_CHECKING, Any, Generator + +import ansiblelint.skip_utils +import ansiblelint.utils +from ansiblelint._internal.rules import LoadingFailureRule +from ansiblelint.constants import States +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable, expand_dirs_in_lintables +from ansiblelint.rules.syntax_check import AnsibleSyntaxCheckRule + +if TYPE_CHECKING: + from argparse import Namespace + + from ansiblelint.rules import RulesCollection + +_logger = logging.getLogger(__name__) + + +@dataclass +class LintResult: + """Class that tracks result of linting.""" + + matches: list[MatchError] + files: set[Lintable] + + +class Runner: + """Runner class performs the linting process.""" + + # pylint: disable=too-many-arguments,too-many-instance-attributes + def __init__( + self, + *lintables: Lintable | str, + rules: RulesCollection, + tags: frozenset[Any] = frozenset(), + skip_list: list[str] | None = None, + exclude_paths: list[str] | None = None, + verbosity: int = 0, + checked_files: set[Lintable] | None = None, + project_dir: str | None = None, + ) -> None: + """Initialize a Runner instance.""" + self.rules = rules + self.lintables: set[Lintable] = set() + self.project_dir = os.path.abspath(project_dir) if project_dir else None + + if skip_list is None: + skip_list = [] + if exclude_paths is None: + exclude_paths = [] + + # Assure consistent type + for item in lintables: + if not isinstance(item, Lintable): + item = Lintable(item) + self.lintables.add(item) + + # Expand folders (roles) to their components + expand_dirs_in_lintables(self.lintables) + + self.tags = tags + self.skip_list = skip_list + self._update_exclude_paths(exclude_paths) + self.verbosity = verbosity + if checked_files is None: + checked_files = set() + self.checked_files = checked_files + + def _update_exclude_paths(self, exclude_paths: list[str]) -> None: + if exclude_paths: + # These will be (potentially) relative paths + paths = ansiblelint.file_utils.expand_paths_vars(exclude_paths) + # Since ansiblelint.utils.find_children returns absolute paths, + # and the list of files we create in `Runner.run` can contain both + # relative and absolute paths, we need to cover both bases. + self.exclude_paths = paths + [os.path.abspath(p) for p in paths] + else: + self.exclude_paths = [] + + def is_excluded(self, lintable: Lintable) -> bool: + """Verify if a file path should be excluded.""" + # Any will short-circuit as soon as something returns True, but will + # be poor performance for the case where the path under question is + # not excluded. + + # Exclusions should be evaluated only using absolute paths in order + # to work correctly. 
+ abs_path = str(lintable.abspath) + if self.project_dir and not abs_path.startswith(self.project_dir): + _logger.debug( + "Skipping %s as it is outside of the project directory.", abs_path + ) + return True + + return any( + abs_path.startswith(path) + or lintable.path.match(path) + or fnmatch(str(abs_path), path) + or fnmatch(str(lintable), path) + for path in self.exclude_paths + ) + + def run(self) -> list[MatchError]: # noqa: C901 + """Execute the linting process.""" + files: list[Lintable] = [] + matches: list[MatchError] = [] + + # remove exclusions + for lintable in self.lintables.copy(): + if self.is_excluded(lintable): + _logger.debug("Excluded %s", lintable) + self.lintables.remove(lintable) + continue + if isinstance(lintable.data, States) and lintable.exc: + matches.append( + MatchError( + filename=lintable, + message=str(lintable.exc), + details=str(lintable.exc.__cause__), + rule=LoadingFailureRule(), + ) + ) + lintable.stop_processing = True + + # -- phase 1 : syntax check in parallel -- + def worker(lintable: Lintable) -> list[MatchError]: + # pylint: disable=protected-access + return AnsibleSyntaxCheckRule._get_ansible_syntax_check_matches(lintable) + + # playbooks: List[Lintable] = [] + for lintable in self.lintables: + if lintable.kind != "playbook" or lintable.stop_processing: + continue + files.append(lintable) + + # avoid resource leak warning, https://github.com/python/cpython/issues/90549 + # pylint: disable=unused-variable + global_resource = multiprocessing.Semaphore() + + pool = multiprocessing.pool.ThreadPool(processes=multiprocessing.cpu_count()) + return_list = pool.map(worker, files, chunksize=1) + pool.close() + pool.join() + for data in return_list: + matches.extend(data) + + # -- phase 2 --- + if not matches: + # do our processing only when ansible syntax check passed in order + # to avoid causing runtime exceptions. Our processing is not as + # resilient to be able process garbage. 
+ matches.extend(self._emit_matches(files)) + + # remove duplicates from files list + files = [value for n, value in enumerate(files) if value not in files[:n]] + + for file in self.lintables: + if file in self.checked_files or not file.kind: + continue + _logger.debug( + "Examining %s of type %s", + ansiblelint.file_utils.normpath(file.path), + file.kind, + ) + + matches.extend( + self.rules.run(file, tags=set(self.tags), skip_list=self.skip_list) + ) + + # update list of checked files + self.checked_files.update(self.lintables) + + # remove any matches made inside excluded files + matches = list( + filter( + lambda match: not self.is_excluded(Lintable(match.filename)) + and hasattr(match, "lintable") + and match.tag not in match.lintable.line_skips[match.linenumber], + matches, + ) + ) + + return sorted(set(matches)) + + def _emit_matches(self, files: list[Lintable]) -> Generator[MatchError, None, None]: + visited: set[Lintable] = set() + while visited != self.lintables: + for lintable in self.lintables - visited: + try: + for child in ansiblelint.utils.find_children(lintable): + if self.is_excluded(child): + continue + self.lintables.add(child) + files.append(child) + except MatchError as exc: + if not exc.filename: # pragma: no branch + exc.filename = str(lintable.path) + exc.rule = LoadingFailureRule() + yield exc + except AttributeError: + yield MatchError(filename=lintable, rule=LoadingFailureRule()) + visited.add(lintable) + + +def _get_matches(rules: RulesCollection, options: Namespace) -> LintResult: + lintables = ansiblelint.utils.get_lintables(opts=options, args=options.lintables) + + for rule in rules: + if "unskippable" in rule.tags: + for entry in (*options.skip_list, *options.warn_list): + if rule.id == entry or entry.startswith(f"{rule.id}["): + raise RuntimeError( + f"Rule '{rule.id}' is unskippable, you cannot use it in 'skip_list' or 'warn_list'. Still, you could exclude the file." + ) + matches = [] + checked_files: set[Lintable] = set() + runner = Runner( + *lintables, + rules=rules, + tags=options.tags, + skip_list=options.skip_list, + exclude_paths=options.exclude_paths, + verbosity=options.verbosity, + checked_files=checked_files, + project_dir=options.project_dir, + ) + matches.extend(runner.run()) + + # Assure we do not print duplicates and the order is consistent + matches = sorted(set(matches)) + + # Convert reported filenames into human readable ones, so we hide the + # fact we used temporary files when processing input from stdin. + for match in matches: + for lintable in lintables: + if match.filename == lintable.filename: + match.filename = lintable.name + break + + return LintResult(matches=matches, files=checked_files) diff --git a/src/ansiblelint/schemas/README.md b/src/ansiblelint/schemas/README.md new file mode 100644 index 0000000..db1a554 --- /dev/null +++ b/src/ansiblelint/schemas/README.md @@ -0,0 +1,102 @@ +# Schemas for Ansible and its related tools + +[![ci](https://github.com/ansible-community/schemas/actions/workflows/task.yml/badge.svg)](https://github.com/ansible-community/schemas/actions/workflows/task.yml) +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) +[![Repository License: MIT](https://img.shields.io/badge/license-MIT-brightgreen.svg)](LICENSE) + +## About Schemas + +This project aims to generate JSON/YAML validation schemas for Ansible files +such as playbooks, tasks, requirements, meta or vars and also for Molecule +configuration. 
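+
+Although these schemas are primarily consumed by editors and by ansible-lint
+itself, they are plain JSON Schema documents, so you can also exercise one
+directly. A minimal sketch (assuming the standalone `playbook.json` schema file
+shipped in this directory and the third-party `jsonschema` and `PyYAML`
+packages; the playbook path is hypothetical):
+
+```python
+"""Validate a playbook file against the bundled playbook schema (sketch)."""
+import json
+from pathlib import Path
+
+import jsonschema  # third-party, assumed installed
+import yaml  # PyYAML, assumed installed
+
+schema = json.loads(Path("src/ansiblelint/schemas/playbook.json").read_text())
+playbook = yaml.safe_load(Path("examples/play.yml").read_text())  # hypothetical file
+
+# Raises jsonschema.exceptions.ValidationError when the playbook does not match.
+jsonschema.validate(instance=playbook, schema=schema)
+print("playbook matches the schema")
+```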
+
+Keep in mind that these schemas will limit your freedom of choice regarding the
+syntax you can use to write Ansible tasks, as they do not allow some historical
+forms which are still allowed by Ansible itself.
+
+Not every file accepted by Ansible will pass these schemas, but we do expect
+that any file that passes these schemas will be accepted by Ansible.
+
+- YAML 1.2 booleans are required as `true` or `false`, while Ansible itself
+  allows you to use more relaxed forms like `yes` or `no`.
+- Inline actions are not allowed, as the schema cannot validate them.
+- Non-built-in modules must be called using `action:` blocks.
+- Module arguments are not yet verified, but we plan to implement that.
+- Our schemas are strict about the use of Jinja2 templating: they require `{{`
+  on arguments declared as **explicit** and forbid the use of `{{` on those
+  marked as **implicit**. See the section below for details.
+
+As these schemas are still experimental, pull requests that improve them are of
+much greater help than bug reports. You are still welcome to report bugs, but
+expect them to wait until someone finds time to fix them.
+
+If you want to help improve the schemas, have a look at the
+[development documentation](CONTRIBUTING.md).
+
+## Schema Bundle
+
+We are currently migrating towards a single [ansible.json](/f/ansible.json)
+schema bundle, one that contains subschema definitions for all the supported
+file types.
+
+To configure your validator or editor to use the bundle, use the new URLs below.
+The part after the `#` in each URL is essential because it tells the loader
+which subschema to use. You can also look at our
+[settings.json](.vscode/settings.json) to understand how to configure the
+[vscode-yaml](https://marketplace.visualstudio.com/items?itemName=redhat.vscode-yaml)
+extension.
+
+- [playbook subschema url](https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/ansible.json#/$defs/playbook)
+- [tasks subschema url](https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/ansible.json#/$defs/tasks)
+
+## Jinja2 implicit vs explicit templating
+
+While Ansible might allow you to mix implicit and explicit templating, our
+schemas do not. They only allow the recommended form: curly braces are forbidden
+on implicit fields and required on explicit ones.
+
+Examples:
+
+```yaml
+- name: some task
+  command: echo 123
+  register: result
+  vars:
+    become_method_var: sudo
+  become_method: become_method_var # <-- schema will not allow this
+  # become_method: "{{ become_method_var }}" # <-- that is allowed
+```
+
+### How to find if a field is implicit or explicit?
+
+Assuming that your keyword is `failed_when`, you can run
+`ansible-doc -t keyword failed_when`, which will give you the following output:
+
+```yaml
+failed_when:
+  applies_to:
+    - Task
+  description:
+    Conditional expression that overrides the task's normal 'failed' status.
+  priority: 0
+  template: implicit
+  type: list
+```
+
+As you can see, the `template` field tells you whether the keyword is implicit
+or explicit.
+
+By being more restrictive, the schema protects you from common accidents, such
+as writing a plain string in an explicit field, which would always evaluate as
+true instead of being evaluated as a Jinja2 template.
+
+## Activating the schemas
+
+At this moment, installing the
+[Ansible VS Code Extension by Red Hat](https://marketplace.visualstudio.com/items?itemName=redhat.ansible)
+will activate these schemas.
The file patterns used to trigger their use can be +seen +[here](https://github.com/ansible-community/vscode-ansible/blob/master/package.json#L44-L94) + +Because these schemas are generic, you can easily use them with any validators +that support them. diff --git a/src/ansiblelint/schemas/__init__.py b/src/ansiblelint/schemas/__init__.py new file mode 100644 index 0000000..bc93fe4 --- /dev/null +++ b/src/ansiblelint/schemas/__init__.py @@ -0,0 +1,4 @@ +"""Module containing cached JSON schemas.""" +from ansiblelint.schemas.main import JSON_SCHEMAS, refresh_schemas, validate_file_schema + +__all__ = ("JSON_SCHEMAS", "refresh_schemas", "validate_file_schema") diff --git a/src/ansiblelint/schemas/__main__.py b/src/ansiblelint/schemas/__main__.py new file mode 100644 index 0000000..301de7d --- /dev/null +++ b/src/ansiblelint/schemas/__main__.py @@ -0,0 +1,13 @@ +"""Module containing cached JSON schemas.""" +import sys + +from ansiblelint.schemas.main import refresh_schemas + +if __name__ == "__main__": + if refresh_schemas(): # pragma: no cover + # flake8: noqa: T201 + print("Schemas were updated.") + sys.exit(1) + else: # pragma: no cover + # flake8: noqa: T201 + print("Schemas not updated", 0) diff --git a/src/ansiblelint/schemas/__store__.json b/src/ansiblelint/schemas/__store__.json new file mode 100644 index 0000000..3150734 --- /dev/null +++ b/src/ansiblelint/schemas/__store__.json @@ -0,0 +1,58 @@ +{ + "ansible-lint-config": { + "etag": "45ec120948f291620e297af0d75625ceb79d9295e2ec8b31652948c31ceeb209", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/ansible-lint-config.json" + }, + "ansible-navigator-config": { + "etag": "c1038568788867f861cdba2aefde88a71b8ab1a6d874ecfa84eb14c110787677", + "url": "https://raw.githubusercontent.com/ansible/ansible-navigator/main/src/ansible_navigator/data/ansible-navigator.json" + }, + "arg_specs": { + "etag": "bd98c32fe4b9672bdadb85efd0dbfded7ef08b6cfda5c0ff91fb1cf45e274e0e", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/arg_specs.json" + }, + "changelog": { + "etag": "f433b08eb8b9c5e358e9e479a79912da4a0f601077b8be52378526c27bc1c13b", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/changelog.json" + }, + "execution-environment": { + "etag": "17ebd7426f2f31e362f7e0eae6683fbb17b83983bc0fdfc2cdf5c83e1ed38808", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/execution-environment.json" + }, + "galaxy": { + "etag": "90d0beb8a8ec0fc9ebdc146f3d19f1566c648ed5574b381ed63e06cb15de4d37", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/galaxy.json" + }, + "inventory": { + "etag": "47d61f6b6f9f84b32414245ae1d0800cffe4ba8adc0a1307dfbdab8d195af3e6", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/inventory.json" + }, + "meta": { + "etag": "090cc0a998e0251c48ecf91b62607fb93cc75cf8553545a4fa26fa808873d236", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/meta.json" + }, + "meta-runtime": { + "etag": "c1633dcafd016a44e2aba7574136983fef3da2bbb74eeb61785424081e8a82a8", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/meta-runtime.json" + }, + "molecule": { + "etag": "a61d2a529f04686f0c7e171e50ab6182798252fba5d955a991134ab5b5c742f8", + "url": 
"https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/molecule.json" + }, + "playbook": { + "etag": "f49882815c54f8595c4e2b9cbcf8f48b1bf5dee892b4cf8938d13343c8448d7b", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/playbook.json" + }, + "requirements": { + "etag": "a11edf24f416043f2da8dd329f1d61338fc9708e017bd3cbe43d8c06e4b30090", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/requirements.json" + }, + "tasks": { + "etag": "7725d87e98752a96967cce8a62bc8593c15a85582f1cdefef6afce61a6a9bbe5", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/tasks.json" + }, + "vars": { + "etag": "09aba62c089e2c56d414919979d3cc055c8983d721fb18e2f612c1bee028e53b", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/vars.json" + } +} diff --git a/src/ansiblelint/schemas/ansible-lint-config.json b/src/ansiblelint/schemas/ansible-lint-config.json new file mode 100644 index 0000000..ad5373c --- /dev/null +++ b/src/ansiblelint/schemas/ansible-lint-config.json @@ -0,0 +1,256 @@ +{ + "$defs": { + "rule": { + "additionalProperties": false, + "properties": { + "exclude_paths": { + "items": { + "type": "string" + }, + "title": "Glob-like paths to be excluded.", + "type": "array" + } + }, + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/ansible-lint-config.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "examples": [".ansible-lint", ".config/ansible-lint.yml"], + "properties": { + "display_relative_path": { + "default": true, + "title": "Configure how to display file paths", + "type": "boolean" + }, + "enable_list": { + "items": { + "type": "string" + }, + "title": "Enable List", + "type": "array" + }, + "exclude_paths": { + "items": { + "type": "string" + }, + "title": "Exclude Paths", + "type": "array" + }, + "extra_vars": { + "title": "Extra Vars", + "type": "object" + }, + "kinds": { + "items": { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + "title": "Kinds", + "type": "array" + }, + "loop_var_prefix": { + "title": "Loop Var Prefix", + "type": "string" + }, + "mock_modules": { + "items": { + "type": "string" + }, + "title": "Mock Modules", + "type": "array" + }, + "mock_roles": { + "items": { + "type": "string" + }, + "title": "Mock Roles", + "type": "array" + }, + "offline": { + "default": false, + "title": "Offline", + "type": "boolean" + }, + "only_builtins_allow_collections": { + "items": { + "type": "string" + }, + "title": "Only Builtins Allow Collections", + "type": "array" + }, + "only_builtins_allow_modules": { + "items": { + "type": "string" + }, + "title": "Only Builtins Allow Modules", + "type": "array" + }, + "parseable": { + "default": true, + "title": "Parseable", + "type": "boolean" + }, + "profile": { + "enum": [ + "min", + "basic", + "moderate", + "safety", + "shared", + "production", + null + ], + "title": "Profile", + "type": ["null", "string"] + }, + "progressive": { + "default": false, + "title": "Progressive", + "type": "boolean" + }, + "quiet": { + "default": true, + "title": "Quiet", + "type": "boolean" + }, + "rules": { + "additionalProperties": { + "$ref": "#/$defs/rule" + }, + "propertyNames": { + "enum": [ + "command-instead-of-module", + "command-instead-of-shell", + "deprecated-bare-vars", + "deprecated-command-syntax", + 
"deprecated-local-action", + "deprecated-module", + "empty-string-compare", + "fqcn", + "fqcn[action-core]", + "fqcn[action-redirect]", + "fqcn[action]", + "galaxy", + "galaxy[no-changelog]", + "galaxy[tags]", + "galaxy[version-incorrect]", + "galaxy[version-missing]", + "ignore-errors", + "inline-env-var", + "internal-error", + "jinja", + "key-order", + "latest", + "literal-compare", + "load-failure", + "meta-incorrect", + "meta-no-info", + "meta-no-tags", + "meta-runtime", + "meta-video-links", + "name", + "no-changed-when", + "no-handler", + "no-jinja-when", + "no-log-password", + "loop-var-prefix", + "no-prompting", + "no-relative-paths", + "no-same-owner", + "no-tabs", + "only-builtins", + "package-latest", + "parser-error", + "partial-become", + "playbook-extension", + "risky-file-permissions", + "risky-octal", + "risky-shell-pipe", + "role-name", + "schema", + "syntax-check", + "var-naming", + "yaml" + ] + }, + "title": "Rules specific configuration.", + "type": "object" + }, + "rulesdir": { + "items": { + "type": "string" + }, + "title": "Rulesdir", + "type": "array" + }, + "sarif_file": { + "default": null, + "title": "SARIF Output filename", + "type": ["null", "string"] + }, + "skip_action_validation": { + "default": false, + "title": "Skip Action Validation", + "type": "boolean" + }, + "skip_list": { + "items": { + "type": "string" + }, + "title": "Skip List", + "type": "array" + }, + "strict": { + "default": false, + "title": "Strict", + "type": "boolean" + }, + "tags": { + "items": { + "type": "string" + }, + "title": "Tags", + "type": "array" + }, + "task_name_prefix": { + "default": "{stem} | ", + "title": "Allow custom prefix for task[prefix]", + "type": "string" + }, + "use_default_rules": { + "default": true, + "title": "Use Default Rules", + "type": "boolean" + }, + "var_naming_pattern": { + "default": "^[a-z_][a-z0-9_]*$", + "title": "Regex used to verify variable names", + "type": "string" + }, + "verbosity": { + "default": 0, + "title": "Verbosity", + "type": "integer" + }, + "warn_list": { + "items": { + "type": "string" + }, + "title": "Warn List", + "type": "array" + }, + "write_list": { + "items": { + "type": "string" + }, + "title": "Write List", + "type": "array" + } + }, + "title": "Ansible-lint Configuration Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/ansible-navigator-config.json b/src/ansiblelint/schemas/ansible-navigator-config.json new file mode 100644 index 0000000..5acc39c --- /dev/null +++ b/src/ansiblelint/schemas/ansible-navigator-config.json @@ -0,0 +1,517 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "properties": { + "ansible-navigator": { + "additionalProperties": false, + "properties": { + "ansible": { + "additionalProperties": false, + "properties": { + "cmdline": { + "description": "Extra parameters passed to the corresponding command", + "type": "string" + }, + "config": { + "additionalProperties": false, + "properties": { + "help": { + "default": false, + "description": "Help options for ansible-config command in stdout mode", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "path": { + "description": "Specify the path to the ansible configuration file", + "type": "string" + } + } + }, + "doc": { + "additionalProperties": false, + "properties": { + "help": { + "default": false, + "description": "Help options for ansible-doc command in stdout mode", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "plugin": { + "additionalProperties": false, + 
"properties": { + "name": { + "description": "Specify the plugin name", + "type": "string" + }, + "type": { + "default": "module", + "description": "Specify the plugin type, 'become', 'cache', 'callback', 'cliconf', 'connection', 'httpapi', 'inventory', 'lookup', 'module', 'netconf', 'shell', 'strategy' or 'vars'", + "enum": [ + "become", + "cache", + "callback", + "cliconf", + "connection", + "httpapi", + "inventory", + "lookup", + "module", + "netconf", + "shell", + "strategy", + "vars" + ], + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "inventory": { + "additionalProperties": false, + "properties": { + "entries": { + "description": "Specify an inventory file path or comma separated host list", + "items": { + "type": "string" + }, + "type": "array" + }, + "help": { + "default": false, + "description": "Help options for ansible-inventory command in stdout mode", + "enum": [ + true, + false + ], + "type": "boolean" + } + } + }, + "playbook": { + "additionalProperties": false, + "properties": { + "help": { + "default": false, + "description": "Help options for ansible-playbook command in stdout mode", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "path": { + "description": "Specify the playbook name", + "type": "string" + } + } + } + }, + "type": "object" + }, + "ansible-builder": { + "additionalProperties": false, + "properties": { + "help": { + "default": false, + "description": "Help options for ansible-builder command in stdout mode", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "workdir": { + "default": ".", + "description": "Specify the path that contains ansible-builder manifest files", + "type": "string" + } + }, + "type": "object" + }, + "ansible-lint": { + "additionalProperties": false, + "properties": { + "config": { + "description": "Specify the path to the ansible-lint configuration file", + "type": "string" + }, + "lintables": { + "description": "Path to files on which to run ansible-lint", + "type": "string" + } + }, + "type": "object" + }, + "ansible-runner": { + "additionalProperties": false, + "properties": { + "artifact-dir": { + "description": "The directory path to store artifacts generated by ansible-runner", + "type": "string" + }, + "rotate-artifacts-count": { + "description": "Keep ansible-runner artifact directories, for last n runs, if set to 0 artifact directories won't be deleted", + "type": "integer" + }, + "timeout": { + "description": "The timeout value after which ansible-runner will forcefully stop the execution", + "type": "integer" + } + }, + "type": "object" + }, + "app": { + "default": "welcome", + "description": "Subcommands", + "enum": [ + "builder", + "collections", + "config", + "doc", + "exec", + "images", + "inventory", + "lint", + "replay", + "run", + "settings", + "welcome" + ], + "type": "string" + }, + "collection-doc-cache-path": { + "default": "~/.cache/ansible-navigator/collection_doc_cache.db", + "description": "The path to collection doc cache", + "type": "string" + }, + "color": { + "additionalProperties": false, + "properties": { + "enable": { + "default": true, + "description": "Enable the use of color for mode interactive and stdout", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "osc4": { + "default": true, + "description": "Enable or disable terminal color changing support with OSC 4", + "enum": [ + true, + false + ], + "type": "boolean" + } + }, + "type": "object" + }, + "editor": { + "additionalProperties": false, + "properties": { + "command": { + 
"default": "vi +{line_number} {filename}", + "description": "Specify the editor command", + "type": "string" + }, + "console": { + "default": true, + "description": "Specify if the editor is console based", + "enum": [ + true, + false + ], + "type": "boolean" + } + }, + "type": "object" + }, + "enable-prompts": { + "default": false, + "description": "Enable prompts for password and in playbooks. This will set mode to stdout and disable playbook artifact creation", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "exec": { + "additionalProperties": false, + "properties": { + "command": { + "default": "/bin/bash", + "description": "Specify the command to run within the execution environment", + "type": "string" + }, + "shell": { + "default": true, + "description": "Specify the exec command should be run in a shell", + "enum": [ + true, + false + ], + "type": "boolean" + } + }, + "type": "object" + }, + "execution-environment": { + "additionalProperties": false, + "properties": { + "container-engine": { + "default": "auto", + "description": "Specify the container engine (auto=podman then docker)", + "enum": [ + "auto", + "podman", + "docker" + ], + "type": "string" + }, + "container-options": { + "description": "Extra parameters passed to the container engine command", + "items": { + "type": "string" + }, + "type": "array" + }, + "enabled": { + "default": true, + "description": "Enable or disable the use of an execution environment", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "environment-variables": { + "additionalProperties": false, + "properties": { + "pass": { + "description": "Specify an existing environment variable to be passed through to and set within the execution environment (--penv MY_VAR)", + "items": { + "type": "string" + }, + "type": "array" + }, + "set": { + "description": "Specify an environment variable and a value to be set within the execution environment (--senv MY_VAR=42)", + "type": "object" + } + }, + "type": "object" + }, + "image": { + "description": "Specify the name of the execution environment image", + "type": "string" + }, + "pull": { + "additionalProperties": false, + "properties": { + "arguments": { + "description": "Specify any additional parameters that should be added to the pull command when pulling an execution environment from a container registry. e.g. 
--pa='--tls-verify=false'", + "items": { + "type": "string" + }, + "type": "array" + }, + "policy": { + "default": "tag", + "description": "Specify the image pull policy always:Always pull the image, missing:Pull if not locally available, never:Never pull the image, tag:if the image tag is 'latest', always pull the image, otherwise pull if not locally available", + "enum": [ + "always", + "missing", + "never", + "tag" + ], + "type": "string" + } + } + }, + "volume-mounts": { + "additionalProperties": false, + "description": "Specify volume to be bind mounted within an execution environment (--eev /home/user/test:/home/user/test:Z)", + "items": { + "additionalProperties": false, + "properties": { + "dest": { + "type": "string" + }, + "options": { + "type": "string" + }, + "src": { + "type": "string" + } + }, + "required": [ + "src", + "dest" + ], + "type": "object" + }, + "type": "array" + } + }, + "type": "object" + }, + "format": { + "default": "yaml", + "description": "Specify the format for stdout output.", + "enum": [ + "json", + "yaml" + ], + "type": "string" + }, + "images": { + "additionalProperties": false, + "properties": { + "details": { + "default": [ + "everything" + ], + "description": "Provide detailed information about the selected execution environment image", + "items": { + "enum": [ + "ansible_collections", + "ansible_version", + "everything", + "os_release", + "python_packages", + "python_version", + "redhat_release", + "system_packages" + ], + "type": "string" + }, + "type": "array" + } + } + }, + "inventory-columns": { + "description": "Specify a host attribute to show in the inventory view", + "items": { + "type": "string" + }, + "type": "array" + }, + "logging": { + "additionalProperties": false, + "properties": { + "append": { + "default": true, + "description": "Specify if log messages should be appended to an existing log file, otherwise a new log file will be created per session", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "file": { + "default": "./ansible-navigator.log", + "description": "Specify the full path for the ansible-navigator log file", + "type": "string" + }, + "level": { + "default": "warning", + "description": "Specify the ansible-navigator log level", + "enum": [ + "debug", + "info", + "warning", + "error", + "critical" + ], + "type": "string" + } + }, + "type": "object" + }, + "mode": { + "default": "interactive", + "description": "Specify the user-interface mode", + "enum": [ + "stdout", + "interactive" + ], + "type": "string" + }, + "playbook-artifact": { + "additionalProperties": false, + "properties": { + "enable": { + "default": true, + "description": "Enable or disable the creation of artifacts for completed playbooks. Note: not compatible with '--mode stdout' when playbooks require user input", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "replay": { + "description": "Specify the path for the playbook artifact to replay", + "type": "string" + }, + "save-as": { + "default": "{playbook_dir}/{playbook_name}-artifact-{time_stamp}.json", + "description": "Specify the name for artifacts created from completed playbooks. The following placeholders are available: {playbook_dir}, {playbook_name}, {playbook_status}, and {time_stamp}", + "type": "string" + } + }, + "type": "object" + }, + "settings": { + "additionalProperties": false, + "properties": { + "effective": { + "default": false, + "description": "Show the effective settings. 
Defaults, CLI parameters, environment variables, and the settings file will be combined", + "type": "boolean" + }, + "sample": { + "default": false, + "description": "Generate a sample settings file", + "type": "boolean" + }, + "schema": { + "default": "json", + "description": "Generate a schema for the settings file ('json'= draft-07 JSON Schema)", + "enum": [ + "json" + ], + "type": "string" + }, + "sources": { + "default": false, + "description": "Show the source of each current settings entry", + "type": "boolean" + } + } + }, + "time-zone": { + "default": "UTC", + "description": "Specify the IANA time zone to use or 'local' to use the system time zone", + "type": "string" + } + } + } + }, + "required": [ + "ansible-navigator" + ], + "title": "ansible-navigator settings v2.2", + "type": "object", + "version": "2.2" +} diff --git a/src/ansiblelint/schemas/ansible-navigator.json b/src/ansiblelint/schemas/ansible-navigator.json new file mode 100644 index 0000000..be83649 --- /dev/null +++ b/src/ansiblelint/schemas/ansible-navigator.json @@ -0,0 +1,430 @@ +{ + "$defs": { + "AnsibleBuilderModel": { + "additionalProperties": false, + "properties": { + "workdir": { + "default": "/tmp/", + "description": "Specify the path that contains ansible-builder manifest files", + "title": "Workdir", + "type": "string" + } + }, + "type": "object" + }, + "AnsibleModel": { + "additionalProperties": false, + "properties": { + "cmdline": { + "description": "Extra parameters passed to the corresponding command", + "title": "Cmdline", + "type": "string" + }, + "config": { + "description": "Specify the path to the ansible configuration file", + "title": "Config", + "type": "string" + }, + "inventories": { + "description": "Specify an inventory file path or host list", + "items": { + "type": "string" + }, + "title": "Inventories", + "type": "array" + }, + "playbook": { + "description": "Specify the playbook name", + "title": "Playbook", + "type": "string" + } + }, + "title": "AnsibleModel", + "type": "object" + }, + "AnsibleNavigatorModel": { + "additionalProperties": false, + "properties": { + "ansible": { + "$ref": "#/$defs/AnsibleModel" + }, + "ansible-builder": { + "$ref": "#/$defs/AnsibleBuilderModel" + }, + "ansible-runner": { + "$ref": "#/$defs/AnsibleRunnerModel" + }, + "app": { + "default": "welcome", + "description": "Subcommands", + "enum": [ + "collections", + "config", + "doc", + "exec", + "images", + "inventory", + "replay", + "run", + "welcome" + ], + "title": "App", + "type": "string" + }, + "collection-doc-cache-path": { + "default": "$HOME/.cache/ansible-navigator/collection_doc_cache.db", + "description": "The path to collection doc cache", + "title": "Collection-Doc-Cache-Path", + "type": "string" + }, + "color": { + "$ref": "#/$defs/ColorModel" + }, + "documentation": { + "$ref": "#/$defs/DocumentationModel" + }, + "editor": { + "$ref": "#/$defs/EditorModel" + }, + "exec": { + "$ref": "#/$defs/ExecModel" + }, + "execution-environment": { + "$ref": "#/$defs/ExecutionEnvironmentModel" + }, + "help-builder": { + "default": false, + "description": "Help options for ansible-builder command in stdout mode", + "title": "Help-Builder", + "type": "boolean" + }, + "help-config": { + "default": false, + "description": "Help options for ansible-config command in stdout mode", + "title": "Help-Config", + "type": "boolean" + }, + "help-doc": { + "default": false, + "description": "Help options for ansible-doc command in stdout mode", + "title": "Help-Doc", + "type": "boolean" + }, + "help-inventory": { + 
"default": false, + "description": "Help options for ansible-inventory command in stdout mode", + "title": "Help-Inventory", + "type": "boolean" + }, + "help-playbook": { + "default": false, + "description": "Help options for ansible-playbook command in stdout mode", + "title": "Help-Playbook", + "type": "boolean" + }, + "inventory-columns": { + "description": "Specify a host attribute to show in the inventory view", + "items": { + "type": "string" + }, + "title": "Inventory-Columns", + "type": "array" + }, + "logging": { + "$ref": "#/$defs/LoggingModel" + }, + "mode": { + "default": "interactive", + "description": "Specify the user-interface mode", + "enum": ["stdout", "interactive"], + "title": "Mode", + "type": "string" + }, + "playbook-artifact": { + "$ref": "#/$defs/PlaybookArtifactModel" + } + }, + "title": "AnsibleNavigatorModel", + "type": "object" + }, + "AnsibleRunnerModel": { + "additionalProperties": false, + "properties": { + "artifact-dir": { + "description": "The directory path to store artifacts generated by ansible-runner", + "title": "Artifact-Dir", + "type": "string" + }, + "rotate-artifacts-count": { + "description": "Keep ansible-runner artifact directories, for last n runs, if set to 0 artifact directories won't be deleted", + "title": "Rotate-Artifacts-Count", + "type": "integer" + }, + "timeout": { + "description": "The timeout value after which ansible-runner will force stop the execution", + "title": "Timeout", + "type": "integer" + } + }, + "title": "AnsibleRunnerModel", + "type": "object" + }, + "ColorModel": { + "additionalProperties": false, + "properties": { + "enable": { + "default": false, + "description": "Enable the use of color in the display", + "title": "Enable", + "type": "boolean" + }, + "osc4": { + "default": true, + "description": "Enable or disable terminal color changing support with OSC 4", + "title": "Osc4", + "type": "boolean" + } + }, + "title": "ColorModel", + "type": "object" + }, + "DocumentationModel": { + "additionalProperties": false, + "properties": { + "plugin": { + "$ref": "#/$defs/PluginModel" + } + }, + "title": "DocumentationModel", + "type": "object" + }, + "EditorModel": { + "additionalProperties": false, + "properties": { + "command": { + "default": "vi +{line_number} {filename}", + "description": "Specify the editor command", + "title": "Command", + "type": "string" + }, + "console": { + "default": true, + "description": "Specify if the editor is console based", + "title": "Console", + "type": "boolean" + } + }, + "title": "EditorModel", + "type": "object" + }, + "EnvironmentVariablesModel": { + "additionalProperties": false, + "properties": { + "pass": { + "description": "Specify an exiting environment variable to be passed through to and set within the execution environment", + "items": { + "type": "string" + }, + "title": "Pass", + "type": "array" + }, + "set": { + "additionalProperties": { + "type": "string" + }, + "description": "Specify an environment variable and a value to be set within the execution environment", + "title": "Set", + "type": "object" + } + }, + "title": "EnvironmentVariablesModel", + "type": "object" + }, + "ExecModel": { + "additionalProperties": false, + "properties": { + "command": { + "default": "/bin/bash", + "description": "Specify the command to run within the execution environment", + "title": "Command", + "type": "string" + }, + "shell": { + "default": true, + "description": "Specify the exec command should be run in a shell", + "title": "Shell", + "type": "boolean" + } + }, + "title": 
"ExecModel", + "type": "object" + }, + "ExecutionEnvironmentModel": { + "additionalProperties": false, + "properties": { + "container-engine": { + "default": "auto", + "description": "Specify the container engine (auto=podman then docker)", + "enum": ["auto", "podman", "docker"], + "title": "Container-Engine", + "type": "string" + }, + "container-options": { + "description": "Extra parameters passed to the container engine command", + "items": { + "type": "string" + }, + "title": "Container-Options", + "type": "array" + }, + "enabled": { + "default": true, + "description": "Enable or disable the use of an execution environment", + "title": "Enabled", + "type": "boolean" + }, + "environment-variables": { + "$ref": "#/$defs/EnvironmentVariablesModel" + }, + "image": { + "default": "quay.io/ansible/creator-ee:v0.2.0", + "description": "Specify the name of the execution environment image", + "title": "Image", + "type": "string" + }, + "pull-policy": { + "default": "tag", + "description": "Specify the image pull policy.\nalways: Always pull the image\nmissing: Pull if not locally available\nnever: Never pull the image\ntag: if the image tag is 'latest', always pull the image, otherwise pull if not locally available", + "enum": ["always", "missing", "never", "tag"], + "title": "Pull-Policy", + "type": "string" + }, + "volume-mounts": { + "description": "Specify volume to be bind mounted within an execution environment", + "items": { + "$ref": "#/$defs/VolumeMountsModel" + }, + "title": "Volume-Mounts", + "type": "array" + } + }, + "title": "ExecutionEnvironmentModel", + "type": "object" + }, + "LoggingModel": { + "additionalProperties": false, + "properties": { + "append": { + "default": true, + "description": "Specify if log messages should be appended to an existing log file, otherwise a new log file will be created per session", + "title": "Append", + "type": "boolean" + }, + "file": { + "default": "$PWD/ansible-navigator.", + "description": "Specify the full path for the ansible-navigator log file", + "title": "File", + "type": "string" + }, + "level": { + "default": "warning", + "description": "Specify the ansible-navigator log level", + "enum": ["debug", "info", "warning", "error", "critical"], + "title": "Level", + "type": "string" + } + }, + "title": "LoggingModel", + "type": "object" + }, + "PlaybookArtifactModel": { + "additionalProperties": false, + "properties": { + "enable": { + "default": true, + "description": "Enable or disable the creation of artifacts for completed playbooks.\nNote: not compatible with 'mode: stdout' when playbooks require user input", + "title": "Enable", + "type": "boolean" + }, + "replay": { + "description": "Specify the path for the playbook artifact to replay", + "title": "Replay", + "type": "string" + }, + "save-as": { + "default": "{playbook_dir}/{playbook_name}-artifact-{ts_utc}.json", + "description": "Specify the name for artifacts created from completed playbooks", + "title": "Save-As", + "type": "string" + } + }, + "title": "PlaybookArtifactModel", + "type": "object" + }, + "PluginModel": { + "additionalProperties": false, + "properties": { + "name": { + "description": "Specify the plugin name", + "title": "Name", + "type": "string" + }, + "type": { + "default": "module", + "description": "Specify the plugin type", + "enum": [ + "become", + "cache", + "callback", + "cliconf", + "connection", + "httpapi", + "inventory", + "lookup", + "module", + "netconf", + "shell", + "strategy", + "vars" + ], + "title": "Type", + "type": "string" + } + }, + 
"title": "PluginModel", + "type": "object" + }, + "VolumeMountsModel": { + "additionalProperties": false, + "properties": { + "dest": { + "title": "Dest", + "type": "string" + }, + "label": { + "title": "Label", + "type": "string" + }, + "src": { + "title": "Src", + "type": "string" + } + }, + "required": ["src", "dest"], + "title": "VolumeMountsModel", + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/ansible-navigator.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "examples": ["ansible-navigator.yml"], + "properties": { + "ansible-navigator": { + "$ref": "#/$defs/AnsibleNavigatorModel" + } + }, + "required": ["ansible-navigator"], + "title": "Ansible-Navigator Configuration Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/ansible.json b/src/ansiblelint/schemas/ansible.json new file mode 100644 index 0000000..ef61363 --- /dev/null +++ b/src/ansiblelint/schemas/ansible.json @@ -0,0 +1,1178 @@ +{ + "$defs": { + "ansible.builtin.import_playbook": { + "additionalProperties": false, + "oneOf": [ + { + "not": { + "required": ["import_playbook"] + }, + "required": ["ansible.builtin.import_playbook"] + }, + { + "not": { + "required": ["ansible.builtin.import_playbook"] + }, + "required": ["import_playbook"] + } + ], + "patternProperties": { + "^(ansible\\.builtin\\.)?import_playbook$": { + "markdownDescription": "* Includes a file with a list of plays to be executed.\n * Files with a list of plays can only be included at the top level.\n * You cannot use this action inside a play.\n\nSee [import_playbook](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/import_playbook_module.html)", + "title": "Import Playbook", + "type": "string" + }, + "name": { + "title": "Name", + "type": "string" + }, + "tags": { + "$ref": "#/$defs/tags" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "type": "object" + }, + "become_method": { + "markdownDescription": "See [become](https://docs.ansible.com/ansible/latest/user_guide/become.html)", + "oneOf": [ + { + "enum": [ + "sudo", + "su", + "pbrun", + "pfexec", + "runas", + "dzdo", + "ksu", + "doas", + "machinectl" + ], + "type": "string" + }, + { + "$ref": "#/$defs/full-jinja" + } + ], + "title": "Become Method" + }, + "block": { + "properties": { + "always": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "title": "Always", + "type": "array" + }, + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "block": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "markdownDescription": "Blocks create logical groups of tasks. Blocks also offer ways to handle task errors, similar to exception handling in many programming languages. 
See [blocks](https://docs.ansible.com/ansible/latest/user_guide/playbooks_blocks.html)", + "title": "Block", + "type": "array" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delegate_facts": { + "title": "Delegate Facts", + "type": "boolean" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean" + }, + "port": { + "$ref": "#/$defs/templated-integer" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "rescue": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "title": "Rescue", + "type": "array" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "required": ["block"], + "type": "object" + }, + "complex_conditional": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + }, + "environment": { + "anyOf": [ + { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + { + "$ref": "#/$defs/full-jinja" + } + ], + "title": "Environment" + }, + "full-jinja": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$", + "type": "string" + }, + "ignore_errors": { + "$ref": "#/$defs/templated-boolean", + "markdownDescription": "See [ignore_errors](https://docs.ansible.com/ansible/latest/user_guide/playbooks_error_handling.html#ignoring-failed-commands)", + "title": "Ignore Errors" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean", + "markdownDescription": "Use for protecting sensitive data. 
See [no_log](https://docs.ansible.com/ansible/latest/reference_appendices/logging.html)", + "title": "no_log" + }, + "play": { + "additionalProperties": false, + "allOf": [ + { + "not": { + "required": ["ansible.builtin.import_playbook"] + } + }, + { + "not": { + "required": ["import_playbook"] + } + } + ], + "properties": { + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "fact_path": { + "title": "Fact Path", + "type": "string" + }, + "force_handlers": { + "title": "Force Handlers", + "type": "boolean" + }, + "gather_facts": { + "title": "Gather Facts", + "type": "boolean" + }, + "gather_subset": { + "items": { + "anyOf": [ + { + "enum": [ + "all", + "min", + "all_ipv4_addresses", + "all_ipv6_addresses", + "apparmor", + "architecture", + "caps", + "chroot,cmdline", + "date_time", + "default_ipv4", + "default_ipv6", + "devices", + "distribution", + "distribution_major_version", + "distribution_release", + "distribution_version", + "dns", + "effective_group_ids", + "effective_user_id", + "env", + "facter", + "fips", + "hardware", + "interfaces", + "is_chroot", + "iscsi", + "kernel", + "local", + "lsb", + "machine", + "machine_id", + "mounts", + "network", + "ohai", + "os_family", + "pkg_mgr", + "platform", + "processor", + "processor_cores", + "processor_count", + "python", + "python_version", + "real_user_id", + "selinux", + "service_mgr", + "ssh_host_key_dsa_public", + "ssh_host_key_ecdsa_public", + "ssh_host_key_ed25519_public", + "ssh_host_key_rsa_public", + "ssh_host_pub_keys", + "ssh_pub_keys", + "system", + "system_capabilities", + "system_capabilities_enforced", + "user", + "user_dir", + "user_gecos", + "user_gid", + "user_id", + "user_shell", + "user_uid", + "virtual", + "virtualization_role", + "virtualization_type" + ], + "type": "string" + }, + { + "enum": [ + "!all", + "!min", + "!all_ipv4_addresses", + "!all_ipv6_addresses", + "!apparmor", + "!architecture", + "!caps", + "!chroot,cmdline", + "!date_time", + "!default_ipv4", + "!default_ipv6", + "!devices", + "!distribution", + "!distribution_major_version", + "!distribution_release", + "!distribution_version", + "!dns", + "!effective_group_ids", + "!effective_user_id", + "!env", + "!facter", + "!fips", + "!hardware", + "!interfaces", + "!is_chroot", + "!iscsi", + "!kernel", + "!local", + "!lsb", + "!machine", + "!machine_id", + "!mounts", + "!network", + "!ohai", + "!os_family", + "!pkg_mgr", + "!platform", + "!processor", + "!processor_cores", + "!processor_count", + "!python", + "!python_version", + "!real_user_id", + "!selinux", + "!service_mgr", + "!ssh_host_key_dsa_public", + "!ssh_host_key_ecdsa_public", + "!ssh_host_key_ed25519_public", + "!ssh_host_key_rsa_public", + "!ssh_host_pub_keys", + 
"!ssh_pub_keys", + "!system", + "!system_capabilities", + "!system_capabilities_enforced", + "!user", + "!user_dir", + "!user_gecos", + "!user_gid", + "!user_id", + "!user_shell", + "!user_uid", + "!virtual", + "!virtualization_role", + "!virtualization_type" + ], + "type": "string" + } + ] + }, + "title": "Gather Subset", + "type": "array" + }, + "gather_timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Gather Timeout" + }, + "handlers": { + "$ref": "#/$defs/tasks" + }, + "hosts": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Hosts" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "max_fail_percentage": { + "title": "Max Fail Percentage", + "type": "number" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean" + }, + "order": { + "enum": [ + "default", + "sorted", + "reverse_sorted", + "reverse_inventory", + "shuffle" + ], + "title": "Order", + "type": "string" + }, + "port": { + "$ref": "#/$defs/templated-integer", + "title": "Port" + }, + "post_tasks": { + "$ref": "#/$defs/tasks" + }, + "pre_tasks": { + "$ref": "#/$defs/tasks" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "roles": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/play-role" + }, + { + "type": "string" + } + ] + }, + "markdownDescription": "Roles let you automatically load related vars, files, tasks, handlers, and other Ansible artifacts based on a known file structure. After you group your content in roles, you can easily reuse them and share them with other users.\n See [roles](https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#roles)", + "title": "Roles", + "type": "array" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "serial": { + "anyOf": [ + { + "$ref": "#/$defs/templated-integer-or-percent" + }, + { + "items": { + "$ref": "#/$defs/templated-integer-or-percent" + }, + "type": "array" + } + ], + "markdownDescription": "Integer, percentage or list of those. 
See [Setting the batch size with serial](https://docs.ansible.com/ansible/latest/user_guide/playbooks_strategies.html#setting-the-batch-size-with-serial)", + "title": "Batch size" + }, + "strategy": { + "title": "Strategy", + "type": "string" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "tasks": { + "$ref": "#/$defs/tasks" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "user": { + "title": "Remote User", + "type": "string" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "vars_files": { + "items": { + "type": "string" + }, + "title": "Vars Files", + "type": ["array", "string", "null"] + }, + "vars_prompt": { + "items": { + "$ref": "#/$defs/vars_prompt" + }, + "markdownDescription": "See [vars_prompt](https://docs.ansible.com/ansible/latest/user_guide/playbooks_prompts.html)", + "title": "vars_prompt", + "type": "array" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "required": ["hosts"], + "title": "play", + "type": "object" + }, + "play-role": { + "markdownDescription": "See [roles](https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#roles)", + "properties": { + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean" + }, + "port": { + "$ref": "#/$defs/templated-integer", + "title": "Port" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "role": { + "title": "Role", + "type": "string" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "required": ["role"], + "title": "play-role", + "type": "object" + }, + "playbook": { + "examples": ["playbooks/*.yml", "playbooks/*.yaml"], + "items": { + "oneOf": [ + { + "$ref": "#/$defs/ansible.builtin.import_playbook" + }, + { + "$ref": "#/$defs/play" + } + ] + }, + "title": "Ansible Playbook", + "type": "array" + }, + "tags": { + "anyOf": [ + { 
+ "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Tags" + }, + "task": { + "additionalProperties": true, + "allOf": [ + { + "not": { + "required": ["hosts"] + } + }, + { + "not": { + "required": ["tasks"] + } + }, + { + "not": { + "required": ["import_playbook"] + } + }, + { + "not": { + "required": ["block"] + } + } + ], + "properties": { + "action": { + "title": "Action", + "type": "string" + }, + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "args": { + "$ref": "#/$defs/templated-object", + "title": "Args" + }, + "async": { + "$ref": "#/$defs/templated-integer", + "title": "Async" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "changed_when": { + "$ref": "#/$defs/complex_conditional", + "markdownDescription": "See [changed_when](https://docs.ansible.com/ansible/latest/user_guide/playbooks_error_handling.html#defining-changed)", + "title": "Changed When" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delay": { + "$ref": "#/$defs/templated-integer", + "title": "Delay" + }, + "delegate_facts": { + "title": "Delegate Facts", + "type": "boolean" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "failed_when": { + "$ref": "#/$defs/complex_conditional", + "title": "Failed When" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "listen": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "markdownDescription": "Applies only to handlers. 
See [listen](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_handlers.html)", + "title": "Listen" + }, + "local_action": { + "title": "Local Action", + "type": ["string", "object"] + }, + "loop": { + "title": "Loop", + "type": ["string", "array"] + }, + "loop_control": { + "title": "Loop Control" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/no_log" + }, + "notify": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Notify" + }, + "poll": { + "$ref": "#/$defs/templated-integer", + "title": "Poll" + }, + "port": { + "$ref": "#/$defs/templated-integer", + "title": "Port" + }, + "register": { + "title": "Register", + "type": "string" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "retries": { + "$ref": "#/$defs/templated-integer", + "title": "Retries" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "until": { + "$ref": "#/$defs/complex_conditional", + "title": "Until" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + }, + "with_dict": { + "title": "With Dict" + }, + "with_fileglob": { + "title": "With Fileglob" + }, + "with_filetree": { + "title": "With Filetree" + }, + "with_first_found": { + "title": "With First Found" + }, + "with_indexed_items": { + "title": "With Indexed Items" + }, + "with_ini": { + "title": "With Ini" + }, + "with_inventory_hostnames": { + "title": "With Inventory Hostnames" + }, + "with_items": { + "anyOf": [ + { + "$ref": "#/$defs/full-jinja" + }, + { + "type": "array" + } + ], + "markdownDescription": "See [loops](https://docs.ansible.com/ansible/latest/user_guide/playbooks_loops.html#loops)", + "title": "With Items" + }, + "with_lines": { + "title": "With Lines" + }, + "with_random_choice": { + "title": "With Random Choice" + }, + "with_sequence": { + "title": "With Sequence" + }, + "with_subelements": { + "title": "With Subelements" + }, + "with_together": { + "title": "With Together" + } + }, + "title": "task", + "type": "object" + }, + "tasks": { + "$schema": "http://json-schema.org/draft-07/schema", + "examples": ["tasks/*.yml", "handlers/*.yml"], + "items": { + "anyOf": [ + { + "$ref": "#/$defs/block" + }, + { + "$ref": "#/$defs/task" + } + ] + }, + "title": "Ansible Tasks Schema", + "type": ["array", "null"] + }, + "templated-boolean": { + "oneOf": [ + { + "type": "boolean" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-integer": { + "oneOf": [ + { + "type": "integer" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-integer-or-percent": { + "oneOf": [ + { + "type": "integer" + }, + { + "pattern": "^\\d+\\.?\\d*%?$", + "type": "string" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-object": { + "oneOf": [ + { + "type": "object" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "vars_prompt": { + "additionalProperties": false, + "properties": { + "confirm": { + "title": "Confirm", + "type": "boolean" + }, + "default": { + "title": "Default", + "type": "string" + }, 
+ "encrypt": { + "enum": [ + "des_crypt", + "bsdi_crypt", + "bigcrypt", + "crypt16", + "md5_crypt", + "bcrypt", + "sha1_crypt", + "sun_md5_crypt", + "sha256_crypt", + "sha512_crypt", + "apr_md5_crypt", + "phpass", + "pbkdf2_digest", + "cta_pbkdf2_sha1", + "dlitz_pbkdf2_sha1", + "scram", + "bsd_nthash" + ], + "title": "Encrypt", + "type": "string" + }, + "name": { + "title": "Name", + "type": "string" + }, + "private": { + "default": true, + "title": "Private", + "type": "boolean" + }, + "prompt": { + "title": "Prompt", + "type": "string" + }, + "salt_size": { + "default": 8, + "title": "Salt Size", + "type": "integer" + }, + "unsafe": { + "default": false, + "markdownDescription": "See [unsafe](https://docs.ansible.com/ansible/latest/user_guide/playbooks_prompts.html#allowing-special-characters-in-vars-prompt-values)", + "title": "Unsafe", + "type": "boolean" + } + }, + "required": ["name", "prompt"], + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible/ansiblelint/main/ansiblelint/schemas/ansible.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "examples": [], + "title": "Ansible Schemas Bundle 22.4", + "type": ["array", "object"] +} diff --git a/src/ansiblelint/schemas/arg_specs.json b/src/ansiblelint/schemas/arg_specs.json new file mode 100644 index 0000000..e5db072 --- /dev/null +++ b/src/ansiblelint/schemas/arg_specs.json @@ -0,0 +1,250 @@ +{ + "$defs": { + "datatype": { + "enum": [ + "str", + "list", + "dict", + "bool", + "int", + "float", + "path", + "raw", + "jsonarg", + "json", + "bytes", + "bits" + ], + "type": "string" + }, + "deprecated_alias": { + "properties": { + "collection_name": { + "type": "string" + }, + "date": { + "type": "string" + }, + "name": { + "type": "string" + }, + "version": { + "type": "string" + } + }, + "required": ["name"], + "type": "object" + }, + "entry_point": { + "additionalProperties": false, + "properties": { + "author": { + "oneOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + }, + "description": { + "oneOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + }, + "options": { + "additionalProperties": { + "$ref": "#/$defs/option" + }, + "type": "object" + }, + "seealso": { + "items": { + "oneOf": [ + { + "additionalProperties": false, + "properties": { + "description": { + "type": "string" + }, + "module": { + "type": "string" + } + }, + "required": ["module"], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "description": { + "type": "string" + }, + "plugin": { + "type": "string" + }, + "plugin_type": { + "type": "string" + } + }, + "required": ["plugin", "plugin_type"], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "description": { + "type": "string" + }, + "ref": { + "type": "string" + } + }, + "required": ["description", "ref"], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "description": { + "type": "string" + }, + "link": { + "type": "string" + }, + "name": { + "type": "string" + } + }, + "required": ["description", "link", "name"], + "type": "object" + } + ] + }, + "type": "array" + }, + "short_description": { + "type": "string" + }, + "version_added": { + "type": "string" + } + }, + "required": ["options"], + "title": "Entry Point", + "type": "object" + }, + "option": { + "additionalProperties": false, + "aliases": { + "items": { + "type": "string" + }, + "type": 
"array" + }, + "apply_defaults": { + "type": "string" + }, + "deprecated_aliases": { + "items": { + "$ref": "#/$defs/deprecated_alias" + }, + "type": "array" + }, + "markdownDescription": "xxx", + "options": { + "$ref": "#/$defs/option" + }, + "properties": { + "choices": { + "type": "array" + }, + "default": { + "default": "None" + }, + "description": { + "description": "Detailed explanation of what this option does. It should be written in full sentences.", + "oneOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + }, + "elements": { + "$ref": "#/$defs/datatype" + }, + "fallback": { + "default": "None", + "type": "string" + }, + "no_log": { + "default": false, + "type": "boolean" + }, + "option-name": { + "description": "The name of the option/argument.", + "type": "string" + }, + "options": { + "additionalProperties": { + "$ref": "#/$defs/option" + }, + "type": "object" + }, + "required": { + "default": false, + "type": "boolean" + }, + "type": { + "$ref": "#/$defs/datatype", + "markdownDescription": "See [argument-spec](https://docs.ansible.com/ansible/latest/dev_guide/developing_program_flow_modules.html#argument-spec" + }, + "version_added": { + "type": "string" + } + }, + "removed_at_date": { + "type": "string" + }, + "removed_from_collection": { + "type": "string" + }, + "removed_in_version": { + "type": "string" + }, + "title": "Option" + } + }, + "$id": "https://raw.githubusercontent.com/ansible/ansiblelint/main/src/ansiblelint/schemas/ansible-argument-specs.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "examples": ["meta/argument_specs.yml"], + "markdownDescription": "Add entry point, usually `main`.\nSee [role-argument-validation](https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#role-argument-validation)", + "properties": { + "argument_specs": { + "additionalProperties": { + "$ref": "#/$defs/entry_point" + }, + "markdownDescription": "Add entry point, usually `main`.\nSee [role-argument-validation](https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#role-argument-validation)" + } + }, + "title": "Ansible Argument Specs Schema" +} diff --git a/src/ansiblelint/schemas/changelog.json b/src/ansiblelint/schemas/changelog.json new file mode 100644 index 0000000..ec0d896 --- /dev/null +++ b/src/ansiblelint/schemas/changelog.json @@ -0,0 +1,262 @@ +{ + "$defs": { + "plugin-descriptions": { + "items": { + "properties": { + "description": { + "markdownDescription": "Value of `short_description` from plugin `DOCUMENTATION`.", + "title": "Description", + "type": "string" + }, + "name": { + "markdownDescription": "It must not be the FQCN, but the name inside the collection.", + "pattern": "[a-zA-Z0-9_]+", + "title": "Name", + "type": "string" + }, + "namespace": { + "type": "null" + } + }, + "type": "object" + }, + "type": "array" + }, + "release": { + "additionalProperties": false, + "properties": { + "changes": { + "additionalProperties": false, + "properties": { + "breaking_changes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "bugfixes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "deprecated_features": { + "items": { + "type": "string" + }, + "type": "array" + }, + "known_issues": { + "items": { + "type": "string" + }, + "type": "array" + }, + "major_changes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "minor_changes": { + "items": { + "type": "string" + }, + "type": 
"array" + }, + "release_summary": { + "markdownDescription": "This must be valid [reStructuredText](https://en.wikipedia.org/wiki/ReStructuredText).", + "title": "Release Summary", + "type": "string" + }, + "removed_features": { + "items": { + "type": "string" + }, + "type": "array" + }, + "security_fixes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "trivial": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "codename": { + "type": "string" + }, + "fragments": { + "items": { + "type": "string" + }, + "markdownDescription": "List of strings representing filenames of changelog framents.", + "type": "array" + }, + "modules": { + "items": { + "properties": { + "description": { + "markdownDescription": "Value of `short_description` from plugin `DOCUMENTATION`.", + "title": "Description", + "type": "string" + }, + "name": { + "markdownDescription": "It must not be the FQCN, but the name inside the collection.", + "pattern": "[a-zA-Z0-9_]+", + "title": "Short module name", + "type": "string" + }, + "namespace": { + "markdownDescription": "Must be `''` for modules directly in `plugins/modules/`, or the dot-separated list of directories the module is in inside the `plugins/modules/` directory. The namespace is used to group new modules by their namespace inside the collection.", + "title": "Namespace", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "objects": { + "additionalProperties": false, + "properties": { + "playbook": { + "items": { + "properties": { + "description": { + "markdownDescription": "A short description of what the playbook does.", + "title": "Description", + "type": "string" + }, + "name": { + "markdownDescription": "It must not be the FQCN, but the name inside the collection.", + "pattern": "[a-zA-Z0-9_]+", + "title": "Short playbook name", + "type": "string" + }, + "namespace": { + "type": "null" + } + }, + "type": "object" + }, + "type": "array" + }, + "role": { + "items": { + "properties": { + "description": { + "markdownDescription": "Value of `short_description` from role's argument spec.", + "title": "Description", + "type": "string" + }, + "name": { + "markdownDescription": "It must not be the FQCN, but the name inside the collection.", + "pattern": "[a-zA-Z0-9_]+", + "title": "Short role name", + "type": "string" + }, + "namespace": { + "type": "null" + } + }, + "type": "object" + }, + "type": "array" + } + }, + "type": "object" + }, + "plugins": { + "additionalProperties": false, + "properties": { + "become": { + "$ref": "#/$defs/plugin-descriptions" + }, + "cache": { + "$ref": "#/$defs/plugin-descriptions" + }, + "callback": { + "$ref": "#/$defs/plugin-descriptions" + }, + "cliconf": { + "$ref": "#/$defs/plugin-descriptions" + }, + "connections": { + "$ref": "#/$defs/plugin-descriptions" + }, + "filter": { + "$ref": "#/$defs/plugin-descriptions" + }, + "httpapi": { + "$ref": "#/$defs/plugin-descriptions" + }, + "inventory": { + "$ref": "#/$defs/plugin-descriptions" + }, + "lookup": { + "$ref": "#/$defs/plugin-descriptions" + }, + "netconf": { + "$ref": "#/$defs/plugin-descriptions" + }, + "shell": { + "$ref": "#/$defs/plugin-descriptions" + }, + "strategy": { + "$ref": "#/$defs/plugin-descriptions" + }, + "test": { + "$ref": "#/$defs/plugin-descriptions" + }, + "vars": { + "$ref": "#/$defs/plugin-descriptions" + } + }, + "type": "object" + }, + "release_date": { + "format": "date", + "markdownDescription": "Use ISO-8601 date format, like 2020-12-31", + "pattern": 
"\\d\\d\\d\\d-\\d\\d-\\d\\d", + "title": "Date of the release.", + "type": "string" + } + }, + "type": "object" + }, + "semver": { + "pattern": "\\d+.\\d+.\\d+.*", + "title": "Version string following SemVer specification.", + "type": ["string", "null"] + } + }, + "$id": "https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/changelog.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "examples": ["changelogs/changelog.yaml"], + "markdownDescription": "Antsibull Changelog Schema is based on [changelog.yaml-format.md](https://github.com/ansible-community/antsibull-changelog/blob/main/docs/changelog.yaml-format.md).", + "properties": { + "ancestor": { + "$ref": "#/$defs/semver" + }, + "releases": { + "patternProperties": { + "\\d+.\\d+.\\d+.*": { + "$ref": "#/$defs/release", + "type": "object" + } + }, + "type": "object" + } + }, + "title": "Antsibull Changelog Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/execution-environment.json b/src/ansiblelint/schemas/execution-environment.json new file mode 100644 index 0000000..405019d --- /dev/null +++ b/src/ansiblelint/schemas/execution-environment.json @@ -0,0 +1,70 @@ +{ + "$id": "https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/ansible-ee.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "description": "See https://docs.ansible.com/automation-controller/latest/html/userguide/ee_reference.html", + "examples": ["execution-environment.yml"], + "properties": { + "additional_build_steps": { + "properties": { + "append": { + "examples": ["RUN cat /etc/os-release"], + "type": ["string", "array"] + }, + "prepend": { + "examples": ["RUN cat /etc/os-release"], + "type": ["string", "array"] + } + }, + "title": "Commands to append or prepend to container build process.", + "type": "object" + }, + "ansible_config": { + "examples": ["ansible.cfg"], + "title": "Ansible configuration file", + "type": "string" + }, + "build_arg_defaults": { + "additionalProperties": true, + "properties": { + "EE_BASE_IMAGE": { + "type": "string" + } + }, + "type": "object" + }, + "dependencies": { + "description": "Allows adding system, python or galaxy dependencies.", + "properties": { + "galaxy": { + "examples": ["requirements.yml"], + "markdownDescription": "Example `requirements.yml`", + "title": "Optional galaxy file", + "type": "string" + }, + "python": { + "examples": ["requirements.txt"], + "markdownDescription": "Example `requirements.txt`", + "title": "Optional python package dependencies", + "type": "string" + }, + "system": { + "examples": ["bindep.txt"], + "markdownDescription": "Example `bindep.txt`", + "title": "Optional system dependencies using bindep format", + "type": "string" + } + }, + "title": "Dependencies", + "type": "object" + }, + "version": { + "enum": [1], + "title": "Version", + "type": "integer" + } + }, + "required": ["version", "dependencies"], + "title": "Ansible Execution Environment Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/galaxy.json b/src/ansiblelint/schemas/galaxy.json new file mode 100644 index 0000000..1ff2aa4 --- /dev/null +++ b/src/ansiblelint/schemas/galaxy.json @@ -0,0 +1,549 @@ +{ + "$defs": { + "CollectionVersionConstraintModel": { + "additionalProperties": false, + "title": "CollectionVersionConstraintModel", + "type": "string" + }, + "SPDXLicense": { + "$ref": "#/$defs/SPDXLicenseEnum", + "title": "SPDXLicense" + }, + "SPDXLicenseEnum": { + 
"description": "An enumeration.", + "enum": [ + "0BSD", + "AAL", + "ADSL", + "AFL-1.1", + "AFL-1.2", + "AFL-2.0", + "AFL-2.1", + "AFL-3.0", + "AGPL-1.0-only", + "AGPL-1.0-or-later", + "AGPL-3.0-only", + "AGPL-3.0-or-later", + "AMDPLPA", + "AML", + "AMPAS", + "ANTLR-PD", + "ANTLR-PD-fallback", + "APAFML", + "APL-1.0", + "APSL-1.0", + "APSL-1.1", + "APSL-1.2", + "APSL-2.0", + "Abstyles", + "Adobe-2006", + "Adobe-Glyph", + "Afmparse", + "Aladdin", + "Apache-1.0", + "Apache-1.1", + "Apache-2.0", + "Artistic-1.0", + "Artistic-1.0-Perl", + "Artistic-1.0-cl8", + "Artistic-2.0", + "BSD-1-Clause", + "BSD-2-Clause", + "BSD-2-Clause-Patent", + "BSD-2-Clause-Views", + "BSD-3-Clause", + "BSD-3-Clause-Attribution", + "BSD-3-Clause-Clear", + "BSD-3-Clause-LBNL", + "BSD-3-Clause-Modification", + "BSD-3-Clause-No-Military-License", + "BSD-3-Clause-No-Nuclear-License", + "BSD-3-Clause-No-Nuclear-License-2014", + "BSD-3-Clause-No-Nuclear-Warranty", + "BSD-3-Clause-Open-MPI", + "BSD-4-Clause", + "BSD-4-Clause-Shortened", + "BSD-4-Clause-UC", + "BSD-Protection", + "BSD-Source-Code", + "BSL-1.0", + "BUSL-1.1", + "Bahyph", + "Barr", + "Beerware", + "BitTorrent-1.0", + "BitTorrent-1.1", + "BlueOak-1.0.0", + "Borceux", + "C-UDA-1.0", + "CAL-1.0", + "CAL-1.0-Combined-Work-Exception", + "CATOSL-1.1", + "CC-BY-1.0", + "CC-BY-2.0", + "CC-BY-2.5", + "CC-BY-3.0", + "CC-BY-3.0-AT", + "CC-BY-3.0-US", + "CC-BY-4.0", + "CC-BY-NC-1.0", + "CC-BY-NC-2.0", + "CC-BY-NC-2.5", + "CC-BY-NC-3.0", + "CC-BY-NC-4.0", + "CC-BY-NC-ND-1.0", + "CC-BY-NC-ND-2.0", + "CC-BY-NC-ND-2.5", + "CC-BY-NC-ND-3.0", + "CC-BY-NC-ND-3.0-IGO", + "CC-BY-NC-ND-4.0", + "CC-BY-NC-SA-1.0", + "CC-BY-NC-SA-2.0", + "CC-BY-NC-SA-2.5", + "CC-BY-NC-SA-3.0", + "CC-BY-NC-SA-4.0", + "CC-BY-ND-1.0", + "CC-BY-ND-2.0", + "CC-BY-ND-2.5", + "CC-BY-ND-3.0", + "CC-BY-ND-4.0", + "CC-BY-SA-1.0", + "CC-BY-SA-2.0", + "CC-BY-SA-2.0-UK", + "CC-BY-SA-2.1-JP", + "CC-BY-SA-2.5", + "CC-BY-SA-3.0", + "CC-BY-SA-3.0-AT", + "CC-BY-SA-4.0", + "CC-PDDC", + "CC0-1.0", + "CDDL-1.0", + "CDDL-1.1", + "CDL-1.0", + "CDLA-Permissive-1.0", + "CDLA-Sharing-1.0", + "CECILL-1.0", + "CECILL-1.1", + "CECILL-2.0", + "CECILL-2.1", + "CECILL-B", + "CECILL-C", + "CERN-OHL-1.1", + "CERN-OHL-1.2", + "CERN-OHL-P-2.0", + "CERN-OHL-S-2.0", + "CERN-OHL-W-2.0", + "CNRI-Jython", + "CNRI-Python", + "CNRI-Python-GPL-Compatible", + "CPAL-1.0", + "CPL-1.0", + "CPOL-1.02", + "CUA-OPL-1.0", + "Caldera", + "ClArtistic", + "Condor-1.1", + "Crossword", + "CrystalStacker", + "Cube", + "D-FSL-1.0", + "DOC", + "DRL-1.0", + "DSDP", + "Dotseqn", + "ECL-1.0", + "ECL-2.0", + "EFL-1.0", + "EFL-2.0", + "EPICS", + "EPL-1.0", + "EPL-2.0", + "EUDatagrid", + "EUPL-1.0", + "EUPL-1.1", + "EUPL-1.2", + "Entessa", + "ErlPL-1.1", + "Eurosym", + "FSFAP", + "FSFUL", + "FSFULLR", + "FTL", + "Fair", + "Frameworx-1.0", + "FreeBSD-DOC", + "FreeImage", + "GD", + "GFDL-1.1-invariants-only", + "GFDL-1.1-invariants-or-later", + "GFDL-1.1-no-invariants-only", + "GFDL-1.1-no-invariants-or-later", + "GFDL-1.1-only", + "GFDL-1.1-or-later", + "GFDL-1.2-invariants-only", + "GFDL-1.2-invariants-or-later", + "GFDL-1.2-no-invariants-only", + "GFDL-1.2-no-invariants-or-later", + "GFDL-1.2-only", + "GFDL-1.2-or-later", + "GFDL-1.3-invariants-only", + "GFDL-1.3-invariants-or-later", + "GFDL-1.3-no-invariants-only", + "GFDL-1.3-no-invariants-or-later", + "GFDL-1.3-only", + "GFDL-1.3-or-later", + "GL2PS", + "GLWTPL", + "GPL-1.0-only", + "GPL-1.0-or-later", + "GPL-2.0-only", + "GPL-2.0-or-later", + "GPL-3.0-only", + "GPL-3.0-or-later", + "Giftware", + "Glide", + 
"Glulxe", + "HPND", + "HPND-sell-variant", + "HTMLTIDY", + "HaskellReport", + "Hippocratic-2.1", + "IBM-pibs", + "ICU", + "IJG", + "IPA", + "IPL-1.0", + "ISC", + "ImageMagick", + "Imlib2", + "Info-ZIP", + "Intel", + "Intel-ACPI", + "Interbase-1.0", + "JPNIC", + "JSON", + "JasPer-2.0", + "LAL-1.2", + "LAL-1.3", + "LGPL-2.0-only", + "LGPL-2.0-or-later", + "LGPL-2.1-only", + "LGPL-2.1-or-later", + "LGPL-3.0-only", + "LGPL-3.0-or-later", + "LGPLLR", + "LPL-1.0", + "LPL-1.02", + "LPPL-1.0", + "LPPL-1.1", + "LPPL-1.2", + "LPPL-1.3a", + "LPPL-1.3c", + "Latex2e", + "Leptonica", + "LiLiQ-P-1.1", + "LiLiQ-R-1.1", + "LiLiQ-Rplus-1.1", + "Libpng", + "Linux-OpenIB", + "MIT", + "MIT-0", + "MIT-CMU", + "MIT-Modern-Variant", + "MIT-advertising", + "MIT-enna", + "MIT-feh", + "MIT-open-group", + "MITNFA", + "MPL-1.0", + "MPL-1.1", + "MPL-2.0", + "MPL-2.0-no-copyleft-exception", + "MS-PL", + "MS-RL", + "MTLL", + "MakeIndex", + "MirOS", + "Motosoto", + "MulanPSL-1.0", + "MulanPSL-2.0", + "Multics", + "Mup", + "NAIST-2003", + "NASA-1.3", + "NBPL-1.0", + "NCGL-UK-2.0", + "NCSA", + "NGPL", + "NIST-PD", + "NIST-PD-fallback", + "NLOD-1.0", + "NLPL", + "NOSL", + "NPL-1.0", + "NPL-1.1", + "NPOSL-3.0", + "NRL", + "NTP", + "NTP-0", + "Naumen", + "Net-SNMP", + "NetCDF", + "Newsletr", + "Nokia", + "Noweb", + "O-UDA-1.0", + "OCCT-PL", + "OCLC-2.0", + "ODC-By-1.0", + "ODbL-1.0", + "OFL-1.0", + "OFL-1.0-RFN", + "OFL-1.0-no-RFN", + "OFL-1.1", + "OFL-1.1-RFN", + "OFL-1.1-no-RFN", + "OGC-1.0", + "OGDL-Taiwan-1.0", + "OGL-Canada-2.0", + "OGL-UK-1.0", + "OGL-UK-2.0", + "OGL-UK-3.0", + "OGTSL", + "OLDAP-1.1", + "OLDAP-1.2", + "OLDAP-1.3", + "OLDAP-1.4", + "OLDAP-2.0", + "OLDAP-2.0.1", + "OLDAP-2.1", + "OLDAP-2.2", + "OLDAP-2.2.1", + "OLDAP-2.2.2", + "OLDAP-2.3", + "OLDAP-2.4", + "OLDAP-2.5", + "OLDAP-2.6", + "OLDAP-2.7", + "OLDAP-2.8", + "OML", + "OPL-1.0", + "OSET-PL-2.1", + "OSL-1.0", + "OSL-1.1", + "OSL-2.0", + "OSL-2.1", + "OSL-3.0", + "OpenSSL", + "PDDL-1.0", + "PHP-3.0", + "PHP-3.01", + "PSF-2.0", + "Parity-6.0.0", + "Parity-7.0.0", + "Plexus", + "PolyForm-Noncommercial-1.0.0", + "PolyForm-Small-Business-1.0.0", + "PostgreSQL", + "Python-2.0", + "QPL-1.0", + "Qhull", + "RHeCos-1.1", + "RPL-1.1", + "RPL-1.5", + "RPSL-1.0", + "RSA-MD", + "RSCPL", + "Rdisc", + "Ruby", + "SAX-PD", + "SCEA", + "SGI-B-1.0", + "SGI-B-1.1", + "SGI-B-2.0", + "SHL-0.5", + "SHL-0.51", + "SISSL", + "SISSL-1.2", + "SMLNJ", + "SMPPL", + "SNIA", + "SPL-1.0", + "SSH-OpenSSH", + "SSH-short", + "SSPL-1.0", + "SWL", + "Saxpath", + "Sendmail", + "Sendmail-8.23", + "SimPL-2.0", + "Sleepycat", + "Spencer-86", + "Spencer-94", + "Spencer-99", + "SugarCRM-1.1.3", + "TAPR-OHL-1.0", + "TCL", + "TCP-wrappers", + "TMate", + "TORQUE-1.1", + "TOSL", + "TU-Berlin-1.0", + "TU-Berlin-2.0", + "UCL-1.0", + "UPL-1.0", + "Unicode-DFS-2015", + "Unicode-DFS-2016", + "Unicode-TOU", + "Unlicense", + "VOSTROM", + "VSL-1.0", + "Vim", + "W3C", + "W3C-19980720", + "W3C-20150513", + "WTFPL", + "Watcom-1.0", + "Wsuipa", + "X11", + "XFree86-1.1", + "XSkat", + "Xerox", + "Xnet", + "YPL-1.0", + "YPL-1.1", + "ZPL-1.1", + "ZPL-2.0", + "ZPL-2.1", + "Zed", + "Zend-2.0", + "Zimbra-1.3", + "Zimbra-1.4", + "Zlib", + "blessing", + "bzip2-1.0.5", + "bzip2-1.0.6", + "copyleft-next-0.3.0", + "copyleft-next-0.3.1", + "curl", + "diffmark", + "dvipdfm", + "eGenix", + "etalab-2.0", + "gSOAP-1.3b", + "gnuplot", + "iMatix", + "libpng-2.0", + "libselinux-1.0", + "libtiff", + "mpich2", + "psfrag", + "psutils", + "xinetd", + "xpp", + "zlib-acknowledgement" + ], + "title": "SPDXLicenseEnum" + } + }, + "$id": 
"https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/ansible-galaxy.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "examples": ["galaxy.yml"], + "properties": { + "authors": { + "items": { + "type": "string" + }, + "title": "Authors", + "type": "array" + }, + "build_ignore": { + "items": { + "type": "string" + }, + "title": "Build Ignore", + "type": "array" + }, + "dependencies": { + "additionalProperties": { + "$ref": "#/$defs/CollectionVersionConstraintModel" + }, + "title": "Dependencies", + "type": "object" + }, + "description": { + "title": "Description", + "type": "string" + }, + "documentation": { + "title": "Documentation", + "type": "string" + }, + "homepage": { + "title": "Homepage", + "type": "string" + }, + "issues": { + "title": "Issues", + "type": "string" + }, + "license": { + "items": { + "$ref": "#/$defs/SPDXLicense" + }, + "title": "License", + "type": "array" + }, + "license_file": { + "title": "License File", + "type": "string" + }, + "name": { + "minLength": 2, + "pattern": "^[a-z][a-z0-9_]+$", + "title": "Name", + "type": "string" + }, + "namespace": { + "minLength": 2, + "pattern": "^[a-z][a-z0-9_]+$", + "title": "Namespace", + "type": "string" + }, + "readme": { + "markdownDescription": "The path to the Markdown (.md) readme file. This path is relative to the root of the collection.\nSee [metadata structure](https://docs.ansible.com/ansible/latest/dev_guide/collections_galaxy_meta.html)", + "title": "Readme", + "type": "string" + }, + "repository": { + "title": "Repository", + "type": "string" + }, + "tags": { + "items": { + "type": "string" + }, + "title": "Tags", + "type": "array" + }, + "version": { + "markdownDescription": "Version must use [SemVer](https://semver.org/) format, which is more restrictive than [PEP-440](https://peps.python.org/pep-0440/). 
For example `1.0.0-rc1` is valid but `1.0.0rc` is not.", + "minLength": 5, + "pattern": "^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$", + "title": "Version", + "type": "string" + } + }, + "required": [ + "namespace", + "name", + "version", + "readme", + "authors", + "description", + "repository" + ], + "title": "Ansible galaxy.yml Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/inventory.json b/src/ansiblelint/schemas/inventory.json new file mode 100644 index 0000000..2c665e2 --- /dev/null +++ b/src/ansiblelint/schemas/inventory.json @@ -0,0 +1,66 @@ +{ + "$defs": { + "group": { + "properties": { + "children": { + "patternProperties": { + "[a-zA-Z-_0-9]": { + "$ref": "#/$defs/group" + } + } + }, + "hosts": { + "patternProperties": { + "[a-zA-Z.-_0-9]": { + "type": ["object", "null"] + } + }, + "type": ["object", "string"] + }, + "vars": { + "type": "object" + } + }, + "type": ["object", "null"] + }, + "special-group": { + "additionalProperties": false, + "properties": { + "children": { + "type": ["object", "null"] + }, + "groups": { + "type": ["object", "null"] + }, + "hosts": { + "type": ["object", "null"] + }, + "vars": { + "type": ["object", "null"] + } + }, + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/ansible-inventory.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": true, + "description": "Ansible Inventory Schema", + "examples": [ + "inventory.yaml", + "inventory.yml", + "inventory/*.yml", + "inventory/*.yaml" + ], + "markdownDescription": "All keys at top levels are groups with `all` and `ungrouped` having a special meaning.\n\nSee [How to build your inventory](https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html)", + "properties": { + "all": { + "$ref": "#/$defs/special-group" + }, + "ungrouped": { + "$ref": "#/$defs/group" + } + }, + "title": "Ansible Inventory Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/main.py b/src/ansiblelint/schemas/main.py new file mode 100644 index 0000000..5a96ce9 --- /dev/null +++ b/src/ansiblelint/schemas/main.py @@ -0,0 +1,141 @@ +"""Module containing cached JSON schemas.""" +from __future__ import annotations + +import json +import logging +import os +import time +import urllib.request +from collections import defaultdict +from functools import lru_cache +from pathlib import Path +from typing import Any +from urllib.request import Request + +import jsonschema +import yaml +from jsonschema.exceptions import ValidationError + +from ansiblelint.file_utils import Lintable +from ansiblelint.loaders import yaml_load_safe + +_logger = logging.getLogger(__package__) + + +class SchemaCacheDict(defaultdict): # type: ignore + """Caching schema store.""" + + def __missing__(self, key: str) -> Any: + """Load schema on its first use.""" + value = get_schema(key) + self[key] = value + return value + + +_schema_cache = SchemaCacheDict() + + +# Maps kinds to JSON schemas +# See https://www.schemastore.org/json/ +store_file = Path(f"{__file__}/../__store__.json").resolve() +with open(store_file, encoding="utf-8") as json_file: + JSON_SCHEMAS = json.load(json_file) + + +@lru_cache(maxsize=None) +def get_schema(kind: str) -> Any: + """Return the schema for the given kind.""" + schema_file = os.path.dirname(__file__) + "/" + kind + ".json" + with 
open(schema_file, encoding="utf-8") as f: + return json.load(f) + + +def validate_file_schema(file: Lintable) -> list[str]: + """Return list of JSON validation errors found.""" + if file.kind not in JSON_SCHEMAS: + return [f"Unable to find JSON Schema '{file.kind}' for '{file.path}' file."] + try: + # convert yaml to json (keys are converted to strings) + yaml_data = yaml_load_safe(file.content) + json_data = json.loads(json.dumps(yaml_data)) + # file.data = json_data + jsonschema.validate( + instance=json_data, + schema=_schema_cache[file.kind], + ) + except yaml.constructor.ConstructorError as exc: + return [f"Failed to load YAML file '{file.path}': {exc.problem}"] + except ValidationError as exc: + return [exc.message] + return [] + + +# pylint: disable=too-many-branches +def refresh_schemas(min_age_seconds: int = 3600 * 24) -> int: + """Refresh JSON schemas by downloading latest versions. + + Returns number of changed schemas. + """ + age = int(time.time() - store_file.stat().st_mtime) + + # never check for updated schemas more than once a day + if min_age_seconds > age: + return 0 + if not os.access(store_file, os.W_OK): # pragma: no cover + _logger.debug( + "Skipping schema update due to lack of writing rights on %s", store_file + ) + return -1 + _logger.debug("Checking for updated schemas...") + + changed = 0 + for kind, data in JSON_SCHEMAS.items(): + url = data["url"] + if "#" in url: + raise RuntimeError( + f"Schema URLs cannot contain # due to python-jsonschema limitation: {url}" + ) + path = Path(f"{os.path.relpath(os.path.dirname(__file__))}/{kind}.json") + _logger.debug("Refreshing %s schema ...", kind) + request = Request(url) + etag = data.get("etag", "") + if etag: + request.add_header("If-None-Match", f'"{data.get("etag")}"') + try: + with urllib.request.urlopen(request, timeout=10) as response: + if response.status == 200: + content = response.read().decode("utf-8").rstrip() + etag = response.headers["etag"].strip('"') + if etag != data.get("etag", ""): + JSON_SCHEMAS[kind]["etag"] = etag + changed += 1 + with open(f"{path}", "w", encoding="utf-8") as f_out: + _logger.info("Schema %s was updated", kind) + f_out.write(content) + f_out.write("\n") # prettier/editors + f_out.truncate() + os.fsync(f_out.fileno()) + # unload possibly loaded schema + if kind in _schema_cache: # pragma: no cover + del _schema_cache[kind] + except (ConnectionError, OSError) as exc: + if ( + isinstance(exc, urllib.error.HTTPError) + and getattr(exc, "code", None) == 304 + ): + _logger.debug("Schema %s is not modified", url) + continue + # In case of networking issues, we just stop and use last-known good + _logger.debug("Skipped schema refresh due to unexpected exception: %s", exc) + break + if changed: # pragma: no cover + with open(store_file, "w", encoding="utf-8") as f_out: + # formatting should match our .prettierrc.yaml + json.dump(JSON_SCHEMAS, f_out, indent=2, sort_keys=True) + f_out.write("\n") # prettier and editors in general + # clear schema cache + get_schema.cache_clear() + else: + store_file.touch() + changed = 1 + return changed diff --git a/src/ansiblelint/schemas/meta-runtime.json b/src/ansiblelint/schemas/meta-runtime.json new file mode 100644 index 0000000..e2d7aa8 --- /dev/null +++ b/src/ansiblelint/schemas/meta-runtime.json @@ -0,0 +1,82 @@ +{ + "$defs": { + "ActionGroup": { + "items": { + "oneOf": [ + { + "type": "string" + }, + { + "$ref": "#/$defs/Metadata" + } + ] + }, + "type": "array" + }, + "Metadata": { + "properties": { + "metadata": { + "properties": { + 
"extend_group": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "Redirect": { + "properties": { + "redirect": { + "type": "string" + } + }, + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/ansible-meta-runtime.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "description": "See https://docs.ansible.com/ansible/devel/dev_guide/developing_collections_structure.html#meta-directory", + "examples": ["**/meta/runtime.yml"], + "properties": { + "action_groups": { + "additionalProperties": { + "$ref": "#/$defs/ActionGroup" + }, + "description": "A mapping of groups and the list of action plugin and module names they contain. They may also have a special ‘metadata’ dictionary in the list, which can be used to include actions from other groups.", + "title": "Action Groups", + "type": "object" + }, + "import_redirection": { + "additionalProperties": { + "$ref": "#/$defs/Redirect" + }, + "description": "A mapping of names for Python import statements and their redirected locations.", + "title": "Import Redirection", + "type": "object" + }, + "plugin_routing": { + "markdownDescription": "Content in a collection that Ansible needs to load from another location or that has been deprecated/removed. The top level keys of plugin_routing are types of plugins, with individual plugin names as subkeys. To define a new location for a plugin, set the redirect field to another name. To deprecate a plugin, use the deprecation field to provide a custom warning message and the removal version or date. If the plugin has been renamed or moved to a new location, the redirect field should also be provided. If a plugin is being removed entirely, tombstone can be used for the fatal error message and removal version or date.", + "properties": { + "inventory": {}, + "module_utils": {}, + "modules": {} + }, + "title": "Plugin Routing", + "type": "object" + }, + "requires_ansible": { + "examples": [">=2.10,<2.11"], + "pattern": "^[^\\s]*$", + "title": "The version of Ansible Core (ansible-core) required to use the collection. 
Multiple versions can be separated with a comma.", + "type": "string" + } + }, + "title": "Ansible Meta Runtime Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/meta.json b/src/ansiblelint/schemas/meta.json new file mode 100644 index 0000000..41cf21a --- /dev/null +++ b/src/ansiblelint/schemas/meta.json @@ -0,0 +1,1381 @@ +{ + "$defs": { + "AIXPlatformModel": { + "properties": { + "name": { + "const": "AIX", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["6.1", "7.1", "7.2", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "AIXPlatformModel", + "type": "object" + }, + "AlpinePlatformModel": { + "properties": { + "name": { + "const": "Alpine", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "AlpinePlatformModel", + "type": "object" + }, + "AmazonPlatformModel": { + "properties": { + "name": { + "const": "Amazon", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "2013.03", + "2013.09", + "2014.03", + "2014.09", + "2015.03", + "2015.09", + "2016.03", + "2016.09", + "2017.03", + "2017.09", + "2017.12", + "2018.03", + "Candidate", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "AmazonPlatformModel", + "type": "object" + }, + "Amazon_Linux_2PlatformModel": { + "properties": { + "name": { + "const": "Amazon Linux 2", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "Amazon Linux 2PlatformModel", + "type": "object" + }, + "ArchLinuxPlatformModel": { + "properties": { + "name": { + "const": "ArchLinux", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "ArchLinuxPlatformModel", + "type": "object" + }, + "ClearLinuxPlatformModel": { + "properties": { + "name": { + "const": "ClearLinux", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "ClearLinuxPlatformModel", + "type": "object" + }, + "CumulusPlatformModel": { + "properties": { + "name": { + "const": "Cumulus", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["2.5", "3.0", "3.1", "3.2", "3.3", "3.4", "3.5", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "CumulusPlatformModel", + "type": "object" + }, + "DebianPlatformModel": { + "properties": { + "name": { + "const": "Debian", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "bookworm", + "bullseye", + "buster", + "etch", + "jessie", + "lenny", + "sid", + "squeeze", + "stretch", + "wheezy", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "DebianPlatformModel", + "type": "object" + }, + "DellOSPlatformModel": { + "properties": { + "name": { + "const": "DellOS", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["10", "6", "9", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "DellOSPlatformModel", + "type": "object" + }, + "DependencyModel": { + "additionalProperties": true, + "anyOf": [ + { + "required": 
["role"] + }, + { + "required": ["src"] + }, + { + "required": ["name"] + } + ], + "markdownDescription": "See https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_reuse_roles.html#role-dependencies and https://github.com/ansible/ansible/blob/devel/lib/ansible/playbook/role/metadata.py#L79\n\nOther keys are treated as role [parameters](https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#passing-different-parameters).", + "properties": { + "become": { + "title": "Become", + "type": "boolean" + }, + "name": { + "title": "Name", + "type": "string" + }, + "role": { + "title": "Role", + "type": "string" + }, + "scm": { + "enum": ["hg", "git"], + "title": "Scm", + "type": "string" + }, + "src": { + "title": "Src", + "type": "string" + }, + "tags": { + "items": { + "type": "string" + }, + "title": "Tags", + "type": ["array", "string"] + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "version": { + "title": "Version", + "type": "string" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "title": "Dependency entry", + "type": "object" + }, + "DevuanPlatformModel": { + "properties": { + "name": { + "const": "Devuan", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["ascii", "beowulf", "ceres", "jessie", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "DevuanPlatformModel", + "type": "object" + }, + "DragonFlyBSDPlatformModel": { + "properties": { + "name": { + "const": "DragonFlyBSD", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["5.2", "5.4", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "DragonFlyBSDPlatformModel", + "type": "object" + }, + "ELPlatformModel": { + "properties": { + "name": { + "const": "EL", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["5", "6", "7", "8", "9", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "ELPlatformModel", + "type": "object" + }, + "FedoraPlatformModel": { + "properties": { + "name": { + "const": "Fedora", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "16", + "17", + "18", + "19", + "20", + "21", + "22", + "23", + "24", + "25", + "26", + "27", + "28", + "29", + "30", + "31", + "32", + "33", + "34", + "35", + "36", + "37", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "FedoraPlatformModel", + "type": "object" + }, + "FreeBSDPlatformModel": { + "properties": { + "name": { + "const": "FreeBSD", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "10.0", + "10.1", + "10.2", + "10.3", + "10.4", + "11.0", + "11.1", + "11.2", + "11.3", + "11.4", + "12.0", + "12.1", + "12.2", + "13.0", + "8.0", + "8.1", + "8.2", + "8.3", + "8.4", + "9.0", + "9.1", + "9.2", + "9.3", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "FreeBSDPlatformModel", + "type": "object" + }, + "GalaxyInfoModel": { + "additionalProperties": false, + "allOf": [ + { + "if": { + "properties": { + "standalone": { + "const": true + } + } + }, + "then": { + "$comment": "Standalone role, so we require several fields.", + "required": [ + "author", + "description", + "license", + "min_ansible_version" + ] + } + }, + { + "if": { + "properties": { + "standalone": { + "const": false + } + } + }, + "then": { + "$comment": 
"Collection roles do not use most galaxy fields.", + "not": { + "required": [ + "cloud_platforms", + "galaxy_tags", + "min_ansible_version", + "namespace", + "platforms", + "role_name" + ] + }, + "required": ["description"] + } + } + ], + "else": { + "$comment": "If standalone is false, then we have a collection role and only description is required", + "required": ["description"] + }, + "properties": { + "author": { + "title": "Author", + "type": "string" + }, + "company": { + "title": "Company", + "type": "string" + }, + "description": { + "title": "Description", + "type": "string" + }, + "galaxy_tags": { + "items": { + "type": "string" + }, + "markdownDescription": "See https://galaxy.ansible.com/docs/contributing/creating_role.html", + "title": "Galaxy Tags", + "type": "array" + }, + "github_branch": { + "markdownDescription": "Optionally specify the branch Galaxy will use when accessing the GitHub repo for this role", + "title": "GitHub Branch", + "type": "string" + }, + "issue_tracker_url": { + "title": "Issue Tracker Url", + "type": "string" + }, + "license": { + "title": "License", + "type": "string" + }, + "min_ansible_container_version": { + "title": "Min Ansible Container Version", + "type": "string" + }, + "min_ansible_version": { + "title": "Min Ansible Version", + "type": "string" + }, + "namespace": { + "markdownDescription": "Used by molecule and ansible-lint to compute FQRN for roles outside collections", + "minLength": 2, + "pattern": "^[a-z][a-z0-9_]+$", + "title": "Namespace Name", + "type": "string" + }, + "platforms": { + "$ref": "#/$defs/platforms" + }, + "role_name": { + "minLength": 2, + "pattern": "^[a-z][a-z0-9_]+$", + "title": "Role Name", + "type": "string" + }, + "standalone": { + "description": "Set to true for old standalone roles, or false for new collection roles.", + "title": "Standalone", + "type": "boolean" + } + }, + "title": "GalaxyInfoModel", + "type": "object" + }, + "GenericBSDPlatformModel": { + "properties": { + "name": { + "const": "GenericBSD", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "GenericBSDPlatformModel", + "type": "object" + }, + "GenericLinuxPlatformModel": { + "properties": { + "name": { + "const": "GenericLinux", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "GenericLinuxPlatformModel", + "type": "object" + }, + "GenericUNIXPlatformModel": { + "properties": { + "name": { + "const": "GenericUNIX", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "GenericUNIXPlatformModel", + "type": "object" + }, + "GentooPlatformModel": { + "properties": { + "name": { + "const": "Gentoo", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "GentooPlatformModel", + "type": "object" + }, + "HardenedBSDPlatformModel": { + "properties": { + "name": { + "const": "HardenedBSD", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["10", "11", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "HardenedBSDPlatformModel", + "type": "object" + }, + "IOSPlatformModel": { + "properties": { + "name": { 
+ "const": "IOS", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "IOSPlatformModel", + "type": "object" + }, + "JunosPlatformModel": { + "properties": { + "name": { + "const": "Junos", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "JunosPlatformModel", + "type": "object" + }, + "MacOSXPlatformModel": { + "properties": { + "name": { + "const": "MacOSX", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "10.10", + "10.11", + "10.12", + "10.13", + "10.14", + "10.15", + "10.7", + "10.8", + "10.9", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "MacOSXPlatformModel", + "type": "object" + }, + "MageiaPlatformModel": { + "properties": { + "name": { + "const": "Mageia", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["7", "8", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "MageiaPlatformModel", + "type": "object" + }, + "NXOSPlatformModel": { + "properties": { + "name": { + "const": "NXOS", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "NXOSPlatformModel", + "type": "object" + }, + "OpenBSDPlatformModel": { + "properties": { + "name": { + "const": "OpenBSD", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "5.6", + "5.7", + "5.8", + "5.9", + "6.0", + "6.1", + "6.2", + "6.3", + "6.4", + "6.5", + "6.6", + "6.7", + "6.8", + "6.9", + "7.0", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "OpenBSDPlatformModel", + "type": "object" + }, + "OpenWrtPlatformModel": { + "properties": { + "name": { + "const": "OpenWrt", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["17.01", "18.06", "19.07", "21.02", "22.03", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "OpenWrtPlatformModel", + "type": "object" + }, + "OracleLinuxPlatformModel": { + "properties": { + "name": { + "const": "OracleLinux", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "8.0", + "8.1", + "8.2", + "8.3", + "8.4", + "8.5", + "8.6", + "8.7", + "9.0", + "9.1", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "OracleLinuxPlatformModel", + "type": "object" + }, + "PAN-OSPlatformModel": { + "properties": { + "name": { + "const": "PAN-OS", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["7.1", "8.0", "8.1", "9.0", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "PAN-OSPlatformModel", + "type": "object" + }, + "SLESPlatformModel": { + "properties": { + "name": { + "const": "SLES", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "10SP3", + "10SP4", + "11", + "11SP1", + "11SP2", + "11SP3", + "11SP4", + "12", + "12SP1", + "12SP2", + "12SP3", + "12SP4", + "12SP5", + "15", + "15SP1", + "15SP2", + "15SP3", + "15SP4", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "SLESPlatformModel", + "type": "object" + }, + 
"SmartOSPlatformModel": { + "properties": { + "name": { + "const": "SmartOS", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "SmartOSPlatformModel", + "type": "object" + }, + "SolarisPlatformModel": { + "properties": { + "name": { + "const": "Solaris", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["10", "11.0", "11.1", "11.2", "11.3", "11.4", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "SolarisPlatformModel", + "type": "object" + }, + "SynologyPlatformModel": { + "properties": { + "name": { + "const": "Synology", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["6.0", "6.1", "6.2", "7.0", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "SynologyPlatformModel", + "type": "object" + }, + "TMOSPlatformModel": { + "properties": { + "name": { + "const": "TMOS", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["12.1", "13.0", "13.1", "14.0", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "TMOSPlatformModel", + "type": "object" + }, + "UbuntuPlatformModel": { + "properties": { + "name": { + "const": "Ubuntu", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "artful", + "bionic", + "cosmic", + "cuttlefish", + "disco", + "eoan", + "focal", + "groovy", + "hirsute", + "impish", + "jammy", + "lucid", + "maverick", + "natty", + "oneiric", + "precise", + "quantal", + "raring", + "saucy", + "trusty", + "utopic", + "vivid", + "wily", + "xenial", + "yakkety", + "zesty", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "UbuntuPlatformModel", + "type": "object" + }, + "Void_LinuxPlatformModel": { + "properties": { + "name": { + "const": "Void Linux", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "Void LinuxPlatformModel", + "type": "object" + }, + "WindowsPlatformModel": { + "properties": { + "name": { + "const": "Windows", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "2008R2", + "2008x64", + "2008x86", + "2012", + "2012R2", + "2016", + "2019", + "2022", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "WindowsPlatformModel", + "type": "object" + }, + "aosPlatformModel": { + "properties": { + "name": { + "const": "aos", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "aosPlatformModel", + "type": "object" + }, + "collections": { + "items": { + "markdownDescription": "See [Using collections in roles](https://docs.ansible.com/ansible/latest/user_guide/collections_using.html#using-collections-in-roles) and [collection naming conventions](https://docs.ansible.com/ansible/latest/dev_guide/developing_modules_in_groups.html#naming-conventions)", + "pattern": "^[a-z_]+\\.[a-z_]+$", + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "complex_conditional": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + }, + "eosPlatformModel": { + 
"properties": { + "name": { + "const": "eos", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "eosPlatformModel", + "type": "object" + }, + "macOSPlatformModel": { + "properties": { + "name": { + "const": "macOS", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "Big-Sur", + "Catalina", + "High-Sierra", + "Mojave", + "Monterey", + "Sierra", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "macOSPlatformModel", + "type": "object" + }, + "opensusePlatformModel": { + "properties": { + "name": { + "const": "opensuse", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "12.1", + "12.2", + "12.3", + "13.1", + "13.2", + "15.0", + "15.1", + "15.2", + "15.3", + "15.4", + "15.5", + "42.1", + "42.2", + "42.3", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "opensusePlatformModel", + "type": "object" + }, + "os10PlatformModel": { + "properties": { + "name": { + "const": "os10", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "os10PlatformModel", + "type": "object" + }, + "platforms": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/AIXPlatformModel" + }, + { + "$ref": "#/$defs/AlpinePlatformModel" + }, + { + "$ref": "#/$defs/AmazonPlatformModel" + }, + { + "$ref": "#/$defs/Amazon_Linux_2PlatformModel" + }, + { + "$ref": "#/$defs/aosPlatformModel" + }, + { + "$ref": "#/$defs/ArchLinuxPlatformModel" + }, + { + "$ref": "#/$defs/ClearLinuxPlatformModel" + }, + { + "$ref": "#/$defs/CumulusPlatformModel" + }, + { + "$ref": "#/$defs/DebianPlatformModel" + }, + { + "$ref": "#/$defs/DellOSPlatformModel" + }, + { + "$ref": "#/$defs/DevuanPlatformModel" + }, + { + "$ref": "#/$defs/DragonFlyBSDPlatformModel" + }, + { + "$ref": "#/$defs/ELPlatformModel" + }, + { + "$ref": "#/$defs/eosPlatformModel" + }, + { + "$ref": "#/$defs/FedoraPlatformModel" + }, + { + "$ref": "#/$defs/FreeBSDPlatformModel" + }, + { + "$ref": "#/$defs/GenericBSDPlatformModel" + }, + { + "$ref": "#/$defs/GenericLinuxPlatformModel" + }, + { + "$ref": "#/$defs/GenericUNIXPlatformModel" + }, + { + "$ref": "#/$defs/GentooPlatformModel" + }, + { + "$ref": "#/$defs/HardenedBSDPlatformModel" + }, + { + "$ref": "#/$defs/IOSPlatformModel" + }, + { + "$ref": "#/$defs/JunosPlatformModel" + }, + { + "$ref": "#/$defs/macOSPlatformModel" + }, + { + "$ref": "#/$defs/MacOSXPlatformModel" + }, + { + "$ref": "#/$defs/MageiaPlatformModel" + }, + { + "$ref": "#/$defs/NXOSPlatformModel" + }, + { + "$ref": "#/$defs/OpenBSDPlatformModel" + }, + { + "$ref": "#/$defs/opensusePlatformModel" + }, + { + "$ref": "#/$defs/OpenWrtPlatformModel" + }, + { + "$ref": "#/$defs/OracleLinuxPlatformModel" + }, + { + "$ref": "#/$defs/os10PlatformModel" + }, + { + "$ref": "#/$defs/PAN-OSPlatformModel" + }, + { + "$ref": "#/$defs/SLESPlatformModel" + }, + { + "$ref": "#/$defs/SmartOSPlatformModel" + }, + { + "$ref": "#/$defs/SolarisPlatformModel" + }, + { + "$ref": "#/$defs/SynologyPlatformModel" + }, + { + "$ref": "#/$defs/TMOSPlatformModel" + }, + { + "$ref": "#/$defs/UbuntuPlatformModel" + }, + { + "$ref": "#/$defs/vCenterPlatformModel" + }, + { + "$ref": "#/$defs/Void_LinuxPlatformModel" + }, + { + "$ref": "#/$defs/vSpherePlatformModel" + }, + { + "$ref": 
"#/$defs/WindowsPlatformModel" + } + ] + }, + "title": "Platforms", + "type": "array" + }, + "vCenterPlatformModel": { + "properties": { + "name": { + "const": "vCenter", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["5.5", "6.0", "6.5", "6.7", "7.0", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "vCenterPlatformModel", + "type": "object" + }, + "vSpherePlatformModel": { + "properties": { + "name": { + "const": "vSphere", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["5.5", "6.0", "6.5", "6.7", "7.0", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "vSpherePlatformModel", + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/meta.json", + "$schema": "http://json-schema.org/draft-07/schema", + "examples": ["meta/main.yml"], + "properties": { + "additionalProperties": false, + "allow_duplicates": { + "title": "Allow Duplicates", + "type": "boolean" + }, + "collections": { + "$ref": "#/$defs/collections" + }, + "dependencies": { + "items": { + "$ref": "#/$defs/DependencyModel" + }, + "title": "Dependencies", + "type": "array" + }, + "galaxy_info": { + "$ref": "#/$defs/GalaxyInfoModel" + } + }, + "title": "Ansible Meta Schema v1/v2", + "type": ["object", "null"] +} diff --git a/src/ansiblelint/schemas/molecule.json b/src/ansiblelint/schemas/molecule.json new file mode 100644 index 0000000..5c45a5e --- /dev/null +++ b/src/ansiblelint/schemas/molecule.json @@ -0,0 +1,561 @@ +{ + "$defs": { + "ContainerRegistryModel": { + "additionalProperties": false, + "properties": { + "url": { + "title": "Url", + "type": "string" + } + }, + "required": ["url"], + "title": "ContainerRegistryModel", + "type": "object" + }, + "MoleculeDependencyModel": { + "additionalProperties": false, + "properties": { + "command": { + "title": "Command", + "type": ["string", "null"] + }, + "enabled": { + "default": true, + "title": "Enabled", + "type": "boolean" + }, + "env": { + "title": "Env", + "type": "object" + }, + "name": { + "enum": ["galaxy", "shell"], + "title": "Name", + "type": "string" + }, + "options": { + "title": "Options", + "type": "object" + } + }, + "required": ["name"], + "title": "MoleculeDependencyModel", + "type": "object" + }, + "MoleculeDriverModel": { + "additionalProperties": false, + "properties": { + "cachier": { + "title": "Cachier", + "type": "string" + }, + "default_box": { + "title": "DefaultBox", + "type": "string" + }, + "name": { + "enum": [ + "azure", + "ec2", + "delegated", + "docker", + "containers", + "openstack", + "podman", + "vagrant", + "digitalocean", + "gce", + "libvirt", + "lxd" + ], + "title": "Name", + "type": "string" + }, + "options": { + "$ref": "#/$defs/MoleculeDriverOptionsModel" + }, + "parallel": { + "title": "Parallel", + "type": "boolean" + }, + "provider": { + "title": "Provider", + "type": "object" + }, + "provision": { + "title": "Provision", + "type": "boolean" + }, + "safe_files": { + "items": { + "type": "string" + }, + "title": "SafeFiles", + "type": "array" + }, + "ssh_connection_options": { + "items": { + "type": "string" + }, + "title": "SshConnectionOptions", + "type": "array" + } + }, + "title": "MoleculeDriverModel", + "type": "object" + }, + "MoleculeDriverOptionsModel": { + "additionalProperties": false, + "properties": { + "ansible_connection_options": { + "additionalProperties": { + "type": "string" + }, + "title": "Ansible 
Connection Options", + "type": "object" + }, + "login_cmd_template": { + "title": "Login Cmd Template", + "type": "string" + }, + "managed": { + "title": "Managed", + "type": "boolean" + } + }, + "title": "MoleculeDriverOptionsModel", + "type": "object" + }, + "MoleculePlatformModel": { + "additionalProperties": true, + "properties": { + "box": { + "title": "Box", + "type": "string" + }, + "cgroupns": { + "title": "Cgroupns", + "type": "string" + }, + "children": { + "items": { + "type": "string" + }, + "type": "array" + }, + "command": { + "title": "Command", + "type": "string" + }, + "cpus": { + "title": "Cpus", + "type": "integer" + }, + "dockerfile": { + "title": "Dockerfile", + "type": "string" + }, + "env": { + "items": { + "type": "object" + }, + "title": "Platform Environment Variables", + "type": "array" + }, + "environment": { + "additionalProperties": { + "type": "string" + }, + "title": "Environment", + "type": "object" + }, + "groups": { + "items": { + "type": "string" + }, + "title": "Groups", + "type": "array" + }, + "hostname": { + "title": "Hostname", + "type": ["string", "boolean"] + }, + "image": { + "title": "Image", + "type": ["string", "null"] + }, + "interfaces": { + "title": "Interfaces", + "type": "array" + }, + "memory": { + "title": "Memory", + "type": "integer" + }, + "name": { + "title": "Name", + "type": "string" + }, + "network_mode": { + "anyOf": [ + { + "enum": ["bridge", "host", "none"], + "type": "string" + }, + { + "pattern": "^service:[a-zA-Z0-9:_.\\\\-]+$", + "type": "string" + }, + { + "pattern": "^container:[a-zA-Z0-9][a-zA-Z0-9_.-]+$", + "type": "string" + } + ], + "title": "Network Mode" + }, + "networks": { + "items": { + "$ref": "#/$defs/platform-network" + }, + "markdownDescription": "Used by docker and podman drivers.", + "title": "Networks", + "type": "array" + }, + "pkg_extras": { + "title": "Pkg Extras", + "type": "string" + }, + "pre_build_image": { + "title": "Pre Build Image", + "type": "boolean" + }, + "privileged": { + "title": "Privileged", + "type": "boolean" + }, + "provider_options": { + "title": "Provider options", + "type": "object" + }, + "provider_raw_config_args": { + "items": { + "type": "string" + }, + "title": "Provider Raw Config Args", + "type": "array" + }, + "registry": { + "$ref": "#/$defs/ContainerRegistryModel" + }, + "tmpfs": { + "items": { + "type": "string" + }, + "title": "Tmpfs", + "type": "array" + }, + "ulimits": { + "items": { + "type": "string" + }, + "title": "Ulimits", + "type": "array" + }, + "volumes": { + "items": { + "type": "string" + }, + "title": "Volumes", + "type": "array" + } + }, + "required": ["name"], + "title": "MoleculePlatformModel", + "type": "object" + }, + "MoleculeScenarioModel": { + "additionalProperties": false, + "properties": { + "check_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "cleanup_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "converge_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "create_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "dependency_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "destroy_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "idempotence_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "lint_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "name": { + "title": "Name", + "type": "string" + }, + "prepare_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "side_effect_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "syntax_sequence": { + "$ref": 
"#/$defs/ScenarioSequence" + }, + "test_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "verify_sequence": { + "$ref": "#/$defs/ScenarioSequence" + } + }, + "title": "MoleculeScenarioModel", + "type": "object" + }, + "ProvisionerConfigOptionsDefaultsModel": { + "additionalProperties": true, + "properties": { + "ansible_managed": { + "default": "Ansible managed: Do NOT edit this file manually!", + "title": "Ansible Managed", + "type": "string" + }, + "display_failed_stderr": { + "default": true, + "title": "Display Failed Stderr", + "type": "boolean" + }, + "fact_caching": { + "title": "Fact Caching", + "type": "string" + }, + "fact_caching_connection": { + "title": "Fact Caching Connection", + "type": "string" + }, + "forks": { + "default": 50, + "title": "Forks", + "type": "integer" + }, + "host_key_checking": { + "default": false, + "title": "Host Key Checking", + "type": "boolean" + }, + "interpreter_python": { + "default": "auto_silent", + "description": "See https://docs.ansible.com/ansible/devel/reference_appendices/interpreter_discovery.html", + "title": "Interpreter Python", + "type": "string" + }, + "nocows": { + "default": 1, + "title": "Nocows", + "type": "integer" + }, + "retry_files_enabled": { + "default": false, + "title": "Retry Files Enabled", + "type": "boolean" + } + }, + "title": "ProvisionerConfigOptionsDefaultsModel", + "type": "object" + }, + "ProvisionerConfigOptionsModel": { + "additionalProperties": true, + "properties": { + "defaults": { + "$ref": "#/$defs/ProvisionerConfigOptionsDefaultsModel" + }, + "ssh_connection": { + "$ref": "#/$defs/ProvisionerConfigOptionsSshConnectionModel" + } + }, + "title": "ProvisionerConfigOptionsModel", + "type": "object" + }, + "ProvisionerConfigOptionsSshConnectionModel": { + "additionalProperties": false, + "properties": { + "control_path": { + "default": "%(directory)s/%%h-%%p-%%r", + "title": "Control Path", + "type": "string" + }, + "scp_if_ssh": { + "default": true, + "title": "Scp If Ssh", + "type": "boolean" + } + }, + "title": "ProvisionerConfigOptionsSshConnectionModel", + "type": "object" + }, + "ProvisionerModel": { + "additionalProperties": true, + "properties": { + "config_options": { + "$ref": "#/$defs/ProvisionerConfigOptionsModel" + }, + "env": { + "title": "Env", + "type": "object" + }, + "inventory": { + "title": "Inventory", + "type": "object" + }, + "log": { + "title": "Log", + "type": "boolean" + }, + "name": { + "enum": ["ansible"], + "title": "Name", + "type": "string" + }, + "playbooks": { + "title": "Playbooks", + "type": "object" + } + }, + "title": "ProvisionerModel", + "type": "object" + }, + "ScenarioSequence": { + "additionalProperties": false, + "items": { + "enum": [ + "check", + "cleanup", + "converge", + "create", + "dependency", + "destroy", + "idempotence", + "lint", + "prepare", + "side_effect", + "syntax", + "test", + "verify" + ], + "type": "string" + }, + "title": "ScenarioSequence", + "type": "array" + }, + "VerifierModel": { + "additionalProperties": false, + "properties": { + "additional_files_or_dirs": { + "items": { + "type": "string" + }, + "title": "AdditionalFilesOrDirs", + "type": "array" + }, + "enabled": { + "title": "Enabled", + "type": "boolean" + }, + "env": { + "title": "Env", + "type": "object" + }, + "name": { + "default": "ansible", + "enum": ["ansible", "goss", "inspec", "testinfra"], + "title": "Name", + "type": "string" + }, + "options": { + "title": "Options", + "type": "object" + } + }, + "title": "VerifierModel", + "type": "object" + }, + "platform-network": 
{ + "properties": { + "aliases": { + "items": { + "type": "string" + }, + "type": "array" + }, + "ipv4_address": { + "type": "string" + }, + "name": { + "type": "string" + } + }, + "required": ["name"], + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansible-lint/schemas/molecule.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "examples": ["molecule/*/molecule.yml"], + "properties": { + "dependency": { + "$ref": "#/$defs/MoleculeDependencyModel" + }, + "driver": { + "$ref": "#/$defs/MoleculeDriverModel" + }, + "lint": { + "title": "Lint", + "type": "string" + }, + "log": { + "default": true, + "title": "Log", + "type": "boolean" + }, + "platforms": { + "items": { + "$ref": "#/$defs/MoleculePlatformModel" + }, + "title": "Platforms", + "type": "array" + }, + "prerun": { + "title": "Prerun", + "type": "boolean" + }, + "provisioner": { + "$ref": "#/$defs/ProvisionerModel" + }, + "role_name_check": { + "enum": [0, 1, 2], + "title": "RoleNameCheck", + "type": "integer" + }, + "scenario": { + "$ref": "#/$defs/MoleculeScenarioModel" + }, + "verifier": { + "$ref": "#/$defs/VerifierModel" + } + }, + "required": ["driver", "platforms"], + "title": "Molecule Scenario Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/playbook.json b/src/ansiblelint/schemas/playbook.json new file mode 100644 index 0000000..2ed4472 --- /dev/null +++ b/src/ansiblelint/schemas/playbook.json @@ -0,0 +1,1221 @@ +{ + "$comment": "Generated from ansible.json, do not edit.", + "$defs": { + "ansible.builtin.import_playbook": { + "additionalProperties": false, + "oneOf": [ + { + "not": { + "required": [ + "import_playbook" + ] + }, + "required": [ + "ansible.builtin.import_playbook" + ] + }, + { + "not": { + "required": [ + "ansible.builtin.import_playbook" + ] + }, + "required": [ + "import_playbook" + ] + } + ], + "patternProperties": { + "^(ansible\\.builtin\\.)?import_playbook$": { + "markdownDescription": "* Includes a file with a list of plays to be executed.\n * Files with a list of plays can only be included at the top level.\n * You cannot use this action inside a play.\n\nSee [import_playbook](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/import_playbook_module.html)", + "title": "Import Playbook", + "type": "string" + }, + "name": { + "title": "Name", + "type": "string" + }, + "tags": { + "$ref": "#/$defs/tags" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "type": "object" + }, + "become_method": { + "markdownDescription": "See [become](https://docs.ansible.com/ansible/latest/user_guide/become.html)", + "oneOf": [ + { + "enum": [ + "sudo", + "su", + "pbrun", + "pfexec", + "runas", + "dzdo", + "ksu", + "doas", + "machinectl" + ], + "type": "string" + }, + { + "$ref": "#/$defs/full-jinja" + } + ], + "title": "Become Method" + }, + "block": { + "properties": { + "always": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "title": "Always", + "type": "array" + }, + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { 
+ "title": "Become User", + "type": "string" + }, + "block": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "markdownDescription": "Blocks create logical groups of tasks. Blocks also offer ways to handle task errors, similar to exception handling in many programming languages. See [blocks](https://docs.ansible.com/ansible/latest/user_guide/playbooks_blocks.html)", + "title": "Block", + "type": "array" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delegate_facts": { + "title": "Delegate Facts", + "type": "boolean" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean" + }, + "port": { + "$ref": "#/$defs/templated-integer" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "rescue": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "title": "Rescue", + "type": "array" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "required": [ + "block" + ], + "type": "object" + }, + "complex_conditional": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + }, + "environment": { + "anyOf": [ + { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + { + "$ref": "#/$defs/full-jinja" + } + ], + "title": "Environment" + }, + "full-jinja": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$", + "type": "string" + }, + "ignore_errors": { + "$ref": "#/$defs/templated-boolean", + "markdownDescription": "See [ignore_errors](https://docs.ansible.com/ansible/latest/user_guide/playbooks_error_handling.html#ignoring-failed-commands)", + "title": "Ignore Errors" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean", + "markdownDescription": "Use for protecting sensitive data. 
See [no_log](https://docs.ansible.com/ansible/latest/reference_appendices/logging.html)", + "title": "no_log" + }, + "play": { + "additionalProperties": false, + "allOf": [ + { + "not": { + "required": [ + "ansible.builtin.import_playbook" + ] + } + }, + { + "not": { + "required": [ + "import_playbook" + ] + } + } + ], + "properties": { + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "fact_path": { + "title": "Fact Path", + "type": "string" + }, + "force_handlers": { + "title": "Force Handlers", + "type": "boolean" + }, + "gather_facts": { + "title": "Gather Facts", + "type": "boolean" + }, + "gather_subset": { + "items": { + "anyOf": [ + { + "enum": [ + "all", + "min", + "all_ipv4_addresses", + "all_ipv6_addresses", + "apparmor", + "architecture", + "caps", + "chroot,cmdline", + "date_time", + "default_ipv4", + "default_ipv6", + "devices", + "distribution", + "distribution_major_version", + "distribution_release", + "distribution_version", + "dns", + "effective_group_ids", + "effective_user_id", + "env", + "facter", + "fips", + "hardware", + "interfaces", + "is_chroot", + "iscsi", + "kernel", + "local", + "lsb", + "machine", + "machine_id", + "mounts", + "network", + "ohai", + "os_family", + "pkg_mgr", + "platform", + "processor", + "processor_cores", + "processor_count", + "python", + "python_version", + "real_user_id", + "selinux", + "service_mgr", + "ssh_host_key_dsa_public", + "ssh_host_key_ecdsa_public", + "ssh_host_key_ed25519_public", + "ssh_host_key_rsa_public", + "ssh_host_pub_keys", + "ssh_pub_keys", + "system", + "system_capabilities", + "system_capabilities_enforced", + "user", + "user_dir", + "user_gecos", + "user_gid", + "user_id", + "user_shell", + "user_uid", + "virtual", + "virtualization_role", + "virtualization_type" + ], + "type": "string" + }, + { + "enum": [ + "!all", + "!min", + "!all_ipv4_addresses", + "!all_ipv6_addresses", + "!apparmor", + "!architecture", + "!caps", + "!chroot,cmdline", + "!date_time", + "!default_ipv4", + "!default_ipv6", + "!devices", + "!distribution", + "!distribution_major_version", + "!distribution_release", + "!distribution_version", + "!dns", + "!effective_group_ids", + "!effective_user_id", + "!env", + "!facter", + "!fips", + "!hardware", + "!interfaces", + "!is_chroot", + "!iscsi", + "!kernel", + "!local", + "!lsb", + "!machine", + "!machine_id", + "!mounts", + "!network", + "!ohai", + "!os_family", + "!pkg_mgr", + "!platform", + "!processor", + "!processor_cores", + "!processor_count", + "!python", + "!python_version", + "!real_user_id", + "!selinux", + "!service_mgr", + "!ssh_host_key_dsa_public", + "!ssh_host_key_ecdsa_public", + "!ssh_host_key_ed25519_public", + "!ssh_host_key_rsa_public", + "!ssh_host_pub_keys", + 
"!ssh_pub_keys", + "!system", + "!system_capabilities", + "!system_capabilities_enforced", + "!user", + "!user_dir", + "!user_gecos", + "!user_gid", + "!user_id", + "!user_shell", + "!user_uid", + "!virtual", + "!virtualization_role", + "!virtualization_type" + ], + "type": "string" + } + ] + }, + "title": "Gather Subset", + "type": "array" + }, + "gather_timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Gather Timeout" + }, + "handlers": { + "$ref": "#/$defs/tasks" + }, + "hosts": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Hosts" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "max_fail_percentage": { + "title": "Max Fail Percentage", + "type": "number" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean" + }, + "order": { + "enum": [ + "default", + "sorted", + "reverse_sorted", + "reverse_inventory", + "shuffle" + ], + "title": "Order", + "type": "string" + }, + "port": { + "$ref": "#/$defs/templated-integer", + "title": "Port" + }, + "post_tasks": { + "$ref": "#/$defs/tasks" + }, + "pre_tasks": { + "$ref": "#/$defs/tasks" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "roles": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/play-role" + }, + { + "type": "string" + } + ] + }, + "markdownDescription": "Roles let you automatically load related vars, files, tasks, handlers, and other Ansible artifacts based on a known file structure. After you group your content in roles, you can easily reuse them and share them with other users.\n See [roles](https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#roles)", + "title": "Roles", + "type": "array" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "serial": { + "anyOf": [ + { + "$ref": "#/$defs/templated-integer-or-percent" + }, + { + "items": { + "$ref": "#/$defs/templated-integer-or-percent" + }, + "type": "array" + } + ], + "markdownDescription": "Integer, percentage or list of those. 
See [Setting the batch size with serial](https://docs.ansible.com/ansible/latest/user_guide/playbooks_strategies.html#setting-the-batch-size-with-serial)", + "title": "Batch size" + }, + "strategy": { + "title": "Strategy", + "type": "string" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "tasks": { + "$ref": "#/$defs/tasks" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "user": { + "title": "Remote User", + "type": "string" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "vars_files": { + "items": { + "type": "string" + }, + "title": "Vars Files", + "type": [ + "array", + "string", + "null" + ] + }, + "vars_prompt": { + "items": { + "$ref": "#/$defs/vars_prompt" + }, + "markdownDescription": "See [vars_prompt](https://docs.ansible.com/ansible/latest/user_guide/playbooks_prompts.html)", + "title": "vars_prompt", + "type": "array" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "required": [ + "hosts" + ], + "title": "play", + "type": "object" + }, + "play-role": { + "markdownDescription": "See [roles](https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#roles)", + "properties": { + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean" + }, + "port": { + "$ref": "#/$defs/templated-integer", + "title": "Port" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "role": { + "title": "Role", + "type": "string" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "required": [ + "role" + ], + "title": "play-role", + "type": "object" + }, + "tags": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Tags" + }, + "task": { + "additionalProperties": true, + "allOf": [ + { + "not": { + "required": [ + "hosts" + ] + } + }, + { + 
"not": { + "required": [ + "tasks" + ] + } + }, + { + "not": { + "required": [ + "import_playbook" + ] + } + }, + { + "not": { + "required": [ + "block" + ] + } + } + ], + "properties": { + "action": { + "title": "Action", + "type": "string" + }, + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "args": { + "$ref": "#/$defs/templated-object", + "title": "Args" + }, + "async": { + "$ref": "#/$defs/templated-integer", + "title": "Async" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "changed_when": { + "$ref": "#/$defs/complex_conditional", + "markdownDescription": "See [changed_when](https://docs.ansible.com/ansible/latest/user_guide/playbooks_error_handling.html#defining-changed)", + "title": "Changed When" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delay": { + "$ref": "#/$defs/templated-integer", + "title": "Delay" + }, + "delegate_facts": { + "title": "Delegate Facts", + "type": "boolean" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "failed_when": { + "$ref": "#/$defs/complex_conditional", + "title": "Failed When" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "listen": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "markdownDescription": "Applies only to handlers. 
See [listen](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_handlers.html)", + "title": "Listen" + }, + "local_action": { + "title": "Local Action", + "type": [ + "string", + "object" + ] + }, + "loop": { + "title": "Loop", + "type": [ + "string", + "array" + ] + }, + "loop_control": { + "title": "Loop Control" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/no_log" + }, + "notify": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Notify" + }, + "poll": { + "$ref": "#/$defs/templated-integer", + "title": "Poll" + }, + "port": { + "$ref": "#/$defs/templated-integer", + "title": "Port" + }, + "register": { + "title": "Register", + "type": "string" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "retries": { + "$ref": "#/$defs/templated-integer", + "title": "Retries" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "until": { + "$ref": "#/$defs/complex_conditional", + "title": "Until" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + }, + "with_dict": { + "title": "With Dict" + }, + "with_fileglob": { + "title": "With Fileglob" + }, + "with_filetree": { + "title": "With Filetree" + }, + "with_first_found": { + "title": "With First Found" + }, + "with_indexed_items": { + "title": "With Indexed Items" + }, + "with_ini": { + "title": "With Ini" + }, + "with_inventory_hostnames": { + "title": "With Inventory Hostnames" + }, + "with_items": { + "anyOf": [ + { + "$ref": "#/$defs/full-jinja" + }, + { + "type": "array" + } + ], + "markdownDescription": "See [loops](https://docs.ansible.com/ansible/latest/user_guide/playbooks_loops.html#loops)", + "title": "With Items" + }, + "with_lines": { + "title": "With Lines" + }, + "with_random_choice": { + "title": "With Random Choice" + }, + "with_sequence": { + "title": "With Sequence" + }, + "with_subelements": { + "title": "With Subelements" + }, + "with_together": { + "title": "With Together" + } + }, + "title": "task", + "type": "object" + }, + "tasks": { + "$schema": "http://json-schema.org/draft-07/schema", + "examples": [ + "tasks/*.yml", + "handlers/*.yml" + ], + "items": { + "anyOf": [ + { + "$ref": "#/$defs/block" + }, + { + "$ref": "#/$defs/task" + } + ] + }, + "title": "Ansible Tasks Schema", + "type": [ + "array", + "null" + ] + }, + "templated-boolean": { + "oneOf": [ + { + "type": "boolean" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-integer": { + "oneOf": [ + { + "type": "integer" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-integer-or-percent": { + "oneOf": [ + { + "type": "integer" + }, + { + "pattern": "^\\d+\\.?\\d*%?$", + "type": "string" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-object": { + "oneOf": [ + { + "type": "object" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "vars_prompt": { + "additionalProperties": false, + "properties": { + "confirm": { + "title": "Confirm", + "type": "boolean" + }, + "default": { + "title": 
"Default", + "type": "string" + }, + "encrypt": { + "enum": [ + "des_crypt", + "bsdi_crypt", + "bigcrypt", + "crypt16", + "md5_crypt", + "bcrypt", + "sha1_crypt", + "sun_md5_crypt", + "sha256_crypt", + "sha512_crypt", + "apr_md5_crypt", + "phpass", + "pbkdf2_digest", + "cta_pbkdf2_sha1", + "dlitz_pbkdf2_sha1", + "scram", + "bsd_nthash" + ], + "title": "Encrypt", + "type": "string" + }, + "name": { + "title": "Name", + "type": "string" + }, + "private": { + "default": true, + "title": "Private", + "type": "boolean" + }, + "prompt": { + "title": "Prompt", + "type": "string" + }, + "salt_size": { + "default": 8, + "title": "Salt Size", + "type": "integer" + }, + "unsafe": { + "default": false, + "markdownDescription": "See [unsafe](https://docs.ansible.com/ansible/latest/user_guide/playbooks_prompts.html#allowing-special-characters-in-vars-prompt-values)", + "title": "Unsafe", + "type": "boolean" + } + }, + "required": [ + "name", + "prompt" + ], + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/playbook.json", + "$schema": "http://json-schema.org/draft-07/schema", + "examples": [ + "playbooks/*.yml", + "playbooks/*.yaml" + ], + "items": { + "oneOf": [ + { + "$ref": "#/$defs/ansible.builtin.import_playbook" + }, + { + "$ref": "#/$defs/play" + } + ] + }, + "title": "Ansible Playbook", + "type": "array" +} diff --git a/src/ansiblelint/schemas/requirements.json b/src/ansiblelint/schemas/requirements.json new file mode 100644 index 0000000..73c8a85 --- /dev/null +++ b/src/ansiblelint/schemas/requirements.json @@ -0,0 +1,135 @@ +{ + "$defs": { + "CollectionModel": { + "additionalProperties": false, + "properties": { + "name": { + "title": "Name", + "type": "string" + }, + "source": { + "title": "Source", + "type": "string" + }, + "type": { + "enum": ["galaxy", "url", "file", "git", "dir", "subdirs"], + "title": "Type", + "type": "string" + }, + "version": { + "title": "Version", + "type": "string" + } + }, + "title": "CollectionModel", + "type": "object" + }, + "CollectionStringModel": { + "title": "CollectionStringModel", + "type": "string" + }, + "IncludeModel": { + "properties": { + "include": { + "title": "Include", + "type": "string" + } + }, + "required": ["include"], + "title": "IncludeModel", + "type": "object" + }, + "RequirementsV2Model": { + "additionalProperties": false, + "anyOf": [ + { + "required": ["collections"] + }, + { + "required": ["roles"] + } + ], + "properties": { + "collections": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/CollectionModel" + }, + { + "$ref": "#/$defs/CollectionStringModel" + } + ] + }, + "title": "Collections", + "type": "array" + }, + "roles": { + "items": { + "$ref": "#/$defs/RoleModel" + }, + "title": "Roles", + "type": "array" + } + }, + "title": "Requirements v2", + "type": "object" + }, + "RoleModel": { + "additionalProperties": false, + "properties": { + "name": { + "title": "Name", + "type": "string" + }, + "scm": { + "anyOf": [ + { + "enum": ["git"], + "type": "string" + }, + { + "enum": ["hg"], + "type": "string" + } + ], + "default": "git", + "title": "Scm" + }, + "src": { + "title": "Src", + "type": "string" + }, + "version": { + "default": "master", + "title": "Version", + "type": "string" + } + }, + "title": "Role", + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/ansible-requirements.json", + "$schema": "http://json-schema.org/draft-07/schema", + "anyOf": [ + { + "items": { + "anyOf": [ + { + "$ref": 
"#/$defs/RoleModel" + }, + { + "$ref": "#/$defs/IncludeModel" + } + ] + }, + "type": "array" + }, + { + "$ref": "#/$defs/RequirementsV2Model" + } + ], + "examples": ["requirements.yml"], + "title": "Ansible Requirements Schema" +} diff --git a/src/ansiblelint/schemas/tasks.json b/src/ansiblelint/schemas/tasks.json new file mode 100644 index 0000000..51f5fb3 --- /dev/null +++ b/src/ansiblelint/schemas/tasks.json @@ -0,0 +1,574 @@ +{ + "$comment": "Generated from ansible.json, do not edit.", + "$defs": { + "become_method": { + "markdownDescription": "See [become](https://docs.ansible.com/ansible/latest/user_guide/become.html)", + "oneOf": [ + { + "enum": [ + "sudo", + "su", + "pbrun", + "pfexec", + "runas", + "dzdo", + "ksu", + "doas", + "machinectl" + ], + "type": "string" + }, + { + "$ref": "#/$defs/full-jinja" + } + ], + "title": "Become Method" + }, + "block": { + "properties": { + "always": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "title": "Always", + "type": "array" + }, + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "block": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "markdownDescription": "Blocks create logical groups of tasks. Blocks also offer ways to handle task errors, similar to exception handling in many programming languages. See [blocks](https://docs.ansible.com/ansible/latest/user_guide/playbooks_blocks.html)", + "title": "Block", + "type": "array" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delegate_facts": { + "title": "Delegate Facts", + "type": "boolean" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean" + }, + "port": { + "$ref": "#/$defs/templated-integer" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "rescue": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "title": "Rescue", + "type": "array" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "required": [ + 
"block" + ], + "type": "object" + }, + "complex_conditional": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + }, + "environment": { + "anyOf": [ + { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + { + "$ref": "#/$defs/full-jinja" + } + ], + "title": "Environment" + }, + "full-jinja": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$", + "type": "string" + }, + "ignore_errors": { + "$ref": "#/$defs/templated-boolean", + "markdownDescription": "See [ignore_errors](https://docs.ansible.com/ansible/latest/user_guide/playbooks_error_handling.html#ignoring-failed-commands)", + "title": "Ignore Errors" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean", + "markdownDescription": "Use for protecting sensitive data. See [no_log](https://docs.ansible.com/ansible/latest/reference_appendices/logging.html)", + "title": "no_log" + }, + "tags": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Tags" + }, + "task": { + "additionalProperties": true, + "allOf": [ + { + "not": { + "required": [ + "hosts" + ] + } + }, + { + "not": { + "required": [ + "tasks" + ] + } + }, + { + "not": { + "required": [ + "import_playbook" + ] + } + }, + { + "not": { + "required": [ + "block" + ] + } + } + ], + "properties": { + "action": { + "title": "Action", + "type": "string" + }, + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "args": { + "$ref": "#/$defs/templated-object", + "title": "Args" + }, + "async": { + "$ref": "#/$defs/templated-integer", + "title": "Async" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "changed_when": { + "$ref": "#/$defs/complex_conditional", + "markdownDescription": "See [changed_when](https://docs.ansible.com/ansible/latest/user_guide/playbooks_error_handling.html#defining-changed)", + "title": "Changed When" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delay": { + "$ref": "#/$defs/templated-integer", + "title": "Delay" + }, + "delegate_facts": { + "title": "Delegate Facts", + "type": "boolean" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "failed_when": { + "$ref": "#/$defs/complex_conditional", + "title": "Failed When" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "listen": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "markdownDescription": "Applies only to handlers. 
See [listen](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_handlers.html)", + "title": "Listen" + }, + "local_action": { + "title": "Local Action", + "type": [ + "string", + "object" + ] + }, + "loop": { + "title": "Loop", + "type": [ + "string", + "array" + ] + }, + "loop_control": { + "title": "Loop Control" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/no_log" + }, + "notify": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Notify" + }, + "poll": { + "$ref": "#/$defs/templated-integer", + "title": "Poll" + }, + "port": { + "$ref": "#/$defs/templated-integer", + "title": "Port" + }, + "register": { + "title": "Register", + "type": "string" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "retries": { + "$ref": "#/$defs/templated-integer", + "title": "Retries" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "until": { + "$ref": "#/$defs/complex_conditional", + "title": "Until" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + }, + "with_dict": { + "title": "With Dict" + }, + "with_fileglob": { + "title": "With Fileglob" + }, + "with_filetree": { + "title": "With Filetree" + }, + "with_first_found": { + "title": "With First Found" + }, + "with_indexed_items": { + "title": "With Indexed Items" + }, + "with_ini": { + "title": "With Ini" + }, + "with_inventory_hostnames": { + "title": "With Inventory Hostnames" + }, + "with_items": { + "anyOf": [ + { + "$ref": "#/$defs/full-jinja" + }, + { + "type": "array" + } + ], + "markdownDescription": "See [loops](https://docs.ansible.com/ansible/latest/user_guide/playbooks_loops.html#loops)", + "title": "With Items" + }, + "with_lines": { + "title": "With Lines" + }, + "with_random_choice": { + "title": "With Random Choice" + }, + "with_sequence": { + "title": "With Sequence" + }, + "with_subelements": { + "title": "With Subelements" + }, + "with_together": { + "title": "With Together" + } + }, + "title": "task", + "type": "object" + }, + "templated-boolean": { + "oneOf": [ + { + "type": "boolean" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-integer": { + "oneOf": [ + { + "type": "integer" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-object": { + "oneOf": [ + { + "type": "object" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + } + }, + "$id": "https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/tasks.json", + "$schema": "http://json-schema.org/draft-07/schema", + "examples": [ + "tasks/*.yml", + "handlers/*.yml" + ], + "items": { + "anyOf": [ + { + "$ref": "#/$defs/block" + }, + { + "$ref": "#/$defs/task" + } + ] + }, + "title": "Ansible Tasks Schema", + "type": [ + "array", + "null" + ] +} diff --git a/src/ansiblelint/schemas/vars.json b/src/ansiblelint/schemas/vars.json new file mode 100644 index 0000000..28bf7ba --- /dev/null +++ b/src/ansiblelint/schemas/vars.json @@ -0,0 +1,29 @@ +{ + "$id": 
"https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/ansible-vars.json", + "$schema": "http://json-schema.org/draft-07/schema", + "anyOf": [ + { + "additionalProperties": false, + "patternProperties": { + "^(?!(False|None|True|and|any_errors_fatal|as|assert|async|await|become|become_exe|become_flags|become_method|become_user|break|check_mode|class|collections|connection|continue|debugger|def|del|diff|elif|else|environment|except|fact_path|finally|for|force_handlers|from|gather_facts|gather_subset|gather_timeout|global|handlers|hosts|if|ignore_errors|ignore_unreachable|import|in|is|lambda|max_fail_percentage|module_defaults|name|no_log|nonlocal|not|or|order|pass|port|post_tasks|pre_tasks|raise|remote_user|return|roles|run_once|serial|strategy|tags|tasks|throttle|timeout|try|vars|vars_files|vars_prompt|while|with|yield)$)[a-zA-Z_][\\w]*$": {} + }, + "type": "object" + }, + { + "pattern": "^\\$ANSIBLE_VAULT;", + "type": "string" + }, + { + "type": "null" + } + ], + "examples": [ + "playbooks/vars/*.yml", + "vars/*.yml", + "defaults/*.yml", + "host_vars/*.yml", + "group_vars/*.yml" + ], + "markdownDescription": "See [Using Variables](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html)", + "title": "Ansible Vars Schema" +} diff --git a/src/ansiblelint/skip_utils.py b/src/ansiblelint/skip_utils.py new file mode 100644 index 0000000..be12171 --- /dev/null +++ b/src/ansiblelint/skip_utils.py @@ -0,0 +1,293 @@ +# (c) 2019–2020, Ansible by Red Hat +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+ +"""Utils related to inline skipping of rules.""" +from __future__ import annotations + +import collections.abc +import logging +import re +from functools import lru_cache +from itertools import product +from typing import TYPE_CHECKING, Any, Generator, Sequence + +# Module 'ruamel.yaml' does not explicitly export attribute 'YAML'; implicit reexport disabled +from ruamel.yaml import YAML +from ruamel.yaml.composer import ComposerError +from ruamel.yaml.scanner import ScannerError +from ruamel.yaml.tokens import CommentToken + +from ansiblelint.config import used_old_tags +from ansiblelint.constants import ( + NESTED_TASK_KEYS, + PLAYBOOK_TASK_KEYWORDS, + RENAMED_TAGS, + SKIPPED_RULES_KEY, +) +from ansiblelint.file_utils import Lintable + +if TYPE_CHECKING: + from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject + +_logger = logging.getLogger(__name__) +_found_deprecated_tags: set[str] = set() +_noqa_comment_re = re.compile(r"^# noqa(\s|:)") + +# playbook: Sequence currently expects only instances of one of the two +# classes below but we should consider avoiding this chimera. +# ruamel.yaml.comments.CommentedSeq +# ansible.parsing.yaml.objects.AnsibleSequence + + +def get_rule_skips_from_line(line: str) -> list[str]: + """Return list of rule ids skipped via comment on the line of yaml.""" + _before_noqa, _noqa_marker, noqa_text = line.partition("# noqa") + + result = [] + for v in noqa_text.lstrip(" :").split(): + if v in RENAMED_TAGS: + tag = RENAMED_TAGS[v] + if v not in _found_deprecated_tags: + _logger.warning( + "Replaced outdated tag '%s' with '%s', replace it to avoid future regressions", + v, + tag, + ) + _found_deprecated_tags.add(v) + v = tag + result.append(v) + return result + + +def append_skipped_rules( + pyyaml_data: AnsibleBaseYAMLObject, lintable: Lintable +) -> AnsibleBaseYAMLObject: + """Append 'skipped_rules' to individual tasks or single metadata block. + + For a file, uses 2nd parser (ruamel.yaml) to pull comments out of + yaml subsets, check for '# noqa' skipped rules, and append any skips to the + original parser (pyyaml) data relied on by remainder of ansible-lint. + + :param pyyaml_data: file text parsed via ansible and pyyaml. + :param file_text: raw file text. + :param file_type: type of file: tasks, handlers or meta. + :returns: original pyyaml_data altered with a 'skipped_rules' list added \ + to individual tasks, or added to the single metadata block. + """ + try: + yaml_skip = _append_skipped_rules(pyyaml_data, lintable) + except RuntimeError: + # Notify user of skip error, do not stop, do not change exit code + _logger.error("Error trying to append skipped rules", exc_info=True) + return pyyaml_data + + if not yaml_skip: + return pyyaml_data + + return yaml_skip + + +@lru_cache(maxsize=None) +def load_data(file_text: str) -> Any: + """Parse ``file_text`` as yaml and return parsed structure. 
+ + This is the main culprit for slow performance, each rule asks for loading yaml again and again + ideally the ``maxsize`` on the decorator above MUST be great or equal total number of rules + :param file_text: raw text to parse + :return: Parsed yaml + """ + yaml = YAML() + # Ruamel role is not to validate the yaml file, so we ignore duplicate keys: + yaml.allow_duplicate_keys = True + try: + return yaml.load(file_text) + except ComposerError: + # load fails on multi-documents with ComposerError exception + return yaml.load_all(file_text) + + +def _append_skipped_rules( # noqa: max-complexity: 12 + pyyaml_data: AnsibleBaseYAMLObject, lintable: Lintable +) -> AnsibleBaseYAMLObject | None: + # parse file text using 2nd parser library + try: + ruamel_data = load_data(lintable.content) + except ScannerError as exc: + _logger.debug( + "Ignored loading skipped rules from file %s due to: %s", lintable, exc + ) + # For unparsable file types, we return empty skip lists + return None + skipped_rules = _get_rule_skips_from_yaml(ruamel_data, lintable) + + if lintable.kind in [ + "yaml", + "requirements", + "vars", + "meta", + "reno", + "test-meta", + "galaxy", + ]: + # AnsibleMapping, dict + if hasattr(pyyaml_data, "get"): + pyyaml_data[SKIPPED_RULES_KEY] = skipped_rules + # AnsibleSequence, list + elif ( + not isinstance(pyyaml_data, str) + and isinstance(pyyaml_data, collections.abc.Sequence) + and skipped_rules + ): + pyyaml_data[0][SKIPPED_RULES_KEY] = skipped_rules + + return pyyaml_data + + # create list of blocks of tasks or nested tasks + if lintable.kind in ("tasks", "handlers"): + ruamel_task_blocks = ruamel_data + pyyaml_task_blocks = pyyaml_data + elif lintable.kind == "playbook": + try: + pyyaml_task_blocks = _get_task_blocks_from_playbook(pyyaml_data) + ruamel_task_blocks = _get_task_blocks_from_playbook(ruamel_data) + except (AttributeError, TypeError): + return pyyaml_data + else: + # For unsupported file types, we return empty skip lists + return None + + # get tasks from blocks of tasks + pyyaml_tasks = _get_tasks_from_blocks(pyyaml_task_blocks) + ruamel_tasks = _get_tasks_from_blocks(ruamel_task_blocks) + + # append skipped_rules for each task + for ruamel_task, pyyaml_task in zip(ruamel_tasks, pyyaml_tasks): + # ignore empty tasks + if not pyyaml_task and not ruamel_task: + continue + + # AnsibleUnicode or str + if isinstance(pyyaml_task, str): + continue + + if pyyaml_task.get("name") != ruamel_task.get("name"): + raise RuntimeError("Error in matching skip comment to a task") + pyyaml_task[SKIPPED_RULES_KEY] = _get_rule_skips_from_yaml( + ruamel_task, lintable + ) + + return pyyaml_data + + +def _get_task_blocks_from_playbook(playbook: Sequence[Any]) -> list[Any]: + """Return parts of playbook that contains tasks, and nested tasks. + + :param playbook: playbook yaml from yaml parser. + :returns: list of task dictionaries. 
+ """ + task_blocks = [] + for play, key in product(playbook, PLAYBOOK_TASK_KEYWORDS): + task_blocks.extend(play.get(key, [])) + return task_blocks + + +def _get_tasks_from_blocks(task_blocks: Sequence[Any]) -> Generator[Any, None, None]: + """Get list of tasks from list made of tasks and nested tasks.""" + if not task_blocks: + return + + def get_nested_tasks(task: Any) -> Generator[Any, None, None]: + if not task or not is_nested_task(task): + return + for k in NESTED_TASK_KEYS: + if k in task and task[k]: + if hasattr(task[k], "get"): + continue + for subtask in task[k]: + yield from get_nested_tasks(subtask) + yield subtask + + for task in task_blocks: + yield from get_nested_tasks(task) + yield task + + +def _get_rule_skips_from_yaml( # noqa: max-complexity: 12 + yaml_input: Sequence[Any], lintable: Lintable +) -> Sequence[Any]: + """Traverse yaml for comments with rule skips and return list of rules.""" + yaml_comment_obj_strings = [] + + if isinstance(yaml_input, str): + return [] + + def traverse_yaml(obj: Any) -> None: + for _, entry in obj.ca.items.items(): + for v in entry: + if isinstance(v, CommentToken): + comment_str = v.value + if _noqa_comment_re.match(comment_str): + line = v.start_mark.line + 1 # ruamel line numbers start at 0 + # column = v.start_mark.column + 1 # ruamel column numbers start at 0 + lintable.line_skips[line].update( + get_rule_skips_from_line(comment_str.strip()) + ) + yaml_comment_obj_strings.append(str(obj.ca.items)) + if isinstance(obj, dict): + for _, val in obj.items(): + if isinstance(val, (dict, list)): + traverse_yaml(val) + elif isinstance(obj, list): + for element in obj: + if isinstance(element, (dict, list)): + traverse_yaml(element) + else: + return + + if isinstance(yaml_input, (dict, list)): + traverse_yaml(yaml_input) + + rule_id_list = [] + for comment_obj_str in yaml_comment_obj_strings: + for line in comment_obj_str.split(r"\n"): + rule_id_list.extend(get_rule_skips_from_line(line)) + + return [normalize_tag(tag) for tag in rule_id_list] + + +def normalize_tag(tag: str) -> str: + """Return current name of tag.""" + if tag in RENAMED_TAGS: + used_old_tags[tag] = RENAMED_TAGS[tag] + return RENAMED_TAGS[tag] + return tag + + +def is_nested_task(task: dict[str, Any]) -> bool: + """Check if task includes block/always/rescue.""" + # Cannot really trust the input + if isinstance(task, str): + return False + + for key in NESTED_TASK_KEYS: + if task.get(key): + return True + + return False diff --git a/src/ansiblelint/stats.py b/src/ansiblelint/stats.py new file mode 100644 index 0000000..67320b8 --- /dev/null +++ b/src/ansiblelint/stats.py @@ -0,0 +1,36 @@ +"""Module hosting functionality about reporting.""" +from __future__ import annotations + +from dataclasses import dataclass, field + + +@dataclass(order=True) +class TagStats: + """Tag statistics.""" + + order: int = 0 # to be computed based on rule's profile + tag: str = "" # rule effective id (can be multiple tags per rule id) + count: int = 0 # total number of occurrences + warning: bool = False # set true if listed in warn_list + profile: str = "" + associated_tags: list[str] = field(default_factory=list) + + +class SummarizedResults: + """The statistics about an ansible-lint run.""" + + failures: int = 0 + warnings: int = 0 + fixed_failures: int = 0 + fixed_warnings: int = 0 + tag_stats: dict[str, TagStats] = {} + passed_profile: str = "" + + @property + def fixed(self) -> int: + """Get total fixed count.""" + return self.fixed_failures + self.fixed_warnings + + def sort(self) -> 
None: + """Sort tag stats by tag name.""" + self.tag_stats = dict(sorted(self.tag_stats.items(), key=lambda t: t[1])) diff --git a/src/ansiblelint/testing/__init__.py b/src/ansiblelint/testing/__init__.py new file mode 100644 index 0000000..a4ebcc8 --- /dev/null +++ b/src/ansiblelint/testing/__init__.py @@ -0,0 +1,147 @@ +"""Test utils for ansible-lint.""" +from __future__ import annotations + +import os +import shutil +import subprocess +import sys +import tempfile +from typing import TYPE_CHECKING, Any + +from ansiblelint.app import get_app +from ansiblelint.rules import RulesCollection + +if TYPE_CHECKING: + # https://github.com/PyCQA/pylint/issues/3240 + # pylint: disable=unsubscriptable-object + CompletedProcess = subprocess.CompletedProcess[Any] + from ansiblelint.errors import MatchError # noqa: E402 +else: + CompletedProcess = subprocess.CompletedProcess + +# pylint: disable=wrong-import-position +from ansiblelint.runner import Runner # noqa: E402 + + +class RunFromText: + """Use Runner on temp files created from testing text snippets.""" + + app = None + + def __init__(self, collection: RulesCollection) -> None: + """Initialize a RunFromText instance with rules collection.""" + # Emulate command line execution initialization as without it Ansible module + # would be loaded with incomplete module/role/collection list. + if not self.app: # pragma: no cover + self.app = get_app() + + self.collection = collection + + def _call_runner(self, path: str) -> list[MatchError]: + runner = Runner(path, rules=self.collection) + return runner.run() + + def run(self, filename: str) -> list[MatchError]: + """Lints received filename.""" + return self._call_runner(filename) + + def run_playbook( + self, playbook_text: str, prefix: str = "playbook" + ) -> list[MatchError]: + """Lints received text as a playbook.""" + with tempfile.NamedTemporaryFile(mode="w", suffix=".yml", prefix=prefix) as fh: + fh.write(playbook_text) + fh.flush() + results = self._call_runner(fh.name) + return results + + def run_role_tasks_main(self, tasks_main_text: str) -> list[MatchError]: + """Lints received text as tasks.""" + role_path = tempfile.mkdtemp(prefix="role_") + tasks_path = os.path.join(role_path, "tasks") + os.makedirs(tasks_path) + with open(os.path.join(tasks_path, "main.yml"), "w", encoding="utf-8") as fh: + fh.write(tasks_main_text) + fh.flush() + results = self._call_runner(role_path) + shutil.rmtree(role_path) + return results + + def run_role_meta_main(self, meta_main_text: str) -> list[MatchError]: + """Lints received text as meta.""" + role_path = tempfile.mkdtemp(prefix="role_") + meta_path = os.path.join(role_path, "meta") + os.makedirs(meta_path) + with open(os.path.join(meta_path, "main.yml"), "w", encoding="utf-8") as fh: + fh.write(meta_main_text) + fh.flush() + results = self._call_runner(role_path) + shutil.rmtree(role_path) + return results + + def run_role_defaults_main(self, defaults_main_text: str) -> list[MatchError]: + """Lints received text as vars file in defaults.""" + role_path = tempfile.mkdtemp(prefix="role_") + defaults_path = os.path.join(role_path, "defaults") + os.makedirs(defaults_path) + with open(os.path.join(defaults_path, "main.yml"), "w", encoding="utf-8") as fh: + fh.write(defaults_main_text) + fh.flush() + results = self._call_runner(role_path) + shutil.rmtree(role_path) + return results + + +def run_ansible_lint( + *argv: str, + cwd: str | None = None, + executable: str | None = None, + env: dict[str, str] | None = None, + offline: bool = True, +) -> CompletedProcess: 
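# Illustrative call (placeholder names): a test might do
#   proc = run_ansible_lint("some_playbook.yml", cwd=str(some_project_dir))
#   assert proc.returncode == 0
# Extra *argv entries are forwarded to the ``python -m ansiblelint`` subprocess started below,
# and ``--offline`` is prepended by default.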
+ """Run ansible-lint on a given path and returns its output.""" + args = [*argv] + if offline: # pragma: no cover + args.insert(0, "--offline") + + if not executable: + executable = sys.executable + args = [sys.executable, "-m", "ansiblelint", *args] + else: + args = [executable, *args] + + # It is not safe to pass entire env for testing as other tests would + # pollute the env, causing weird behaviors, so we pass only a safe list of + # vars. + safe_list = [ + "COVERAGE_FILE", + "COVERAGE_PROCESS_START", + "HOME", + "LANG", + "LC_ALL", + "LC_CTYPE", + "NO_COLOR", + "PATH", + "PYTHONIOENCODING", + "PYTHONPATH", + "TERM", + "VIRTUAL_ENV", + ] + + if env is None: + _env = {} + else: + _env = env + for v in safe_list: + if v in os.environ and v not in _env: + _env[v] = os.environ[v] + + return subprocess.run( + args, + capture_output=True, + shell=False, # needed when command is a list + check=False, + cwd=cwd, + env=_env, + text=True, + ) diff --git a/src/ansiblelint/testing/fixtures.py b/src/ansiblelint/testing/fixtures.py new file mode 100644 index 0000000..bf7160f --- /dev/null +++ b/src/ansiblelint/testing/fixtures.py @@ -0,0 +1,55 @@ +"""PyTest Fixtures. + +They should not be imported, instead add code below to your root conftest.py +file: + +pytest_plugins = ['ansiblelint.testing'] +""" +from __future__ import annotations + +import copy +import os +from argparse import Namespace +from typing import Iterator + +import pytest +from _pytest.fixtures import SubRequest + +from ansiblelint.config import options # noqa: F401 +from ansiblelint.constants import DEFAULT_RULESDIR +from ansiblelint.rules import RulesCollection +from ansiblelint.testing import RunFromText + + +@pytest.fixture(name="default_rules_collection") +def fixture_default_rules_collection() -> RulesCollection: + """Return default rule collection.""" + assert os.path.isdir(DEFAULT_RULESDIR) + # For testing we want to manually enable opt-in rules + options.enable_list = ["no-same-owner"] + return RulesCollection(rulesdirs=[DEFAULT_RULESDIR], options=options) + + +@pytest.fixture +def default_text_runner(default_rules_collection: RulesCollection) -> RunFromText: + """Return RunFromText instance for the default set of collections.""" + return RunFromText(default_rules_collection) + + +@pytest.fixture +def rule_runner(request: SubRequest, config_options: Namespace) -> RunFromText: + """Return runner for a specific rule class.""" + rule_class = request.param + config_options.enable_list.append(rule_class().id) + collection = RulesCollection(options=config_options) + collection.register(rule_class()) + return RunFromText(collection) + + +@pytest.fixture(name="config_options") +def fixture_config_options() -> Iterator[Namespace]: + """Return configuration options that will be restored after testrun.""" + global options # pylint: disable=global-statement,invalid-name + original_options = copy.deepcopy(options) + yield options + options = original_options diff --git a/src/ansiblelint/text.py b/src/ansiblelint/text.py new file mode 100644 index 0000000..12ec9ad --- /dev/null +++ b/src/ansiblelint/text.py @@ -0,0 +1,50 @@ +"""Text utils.""" +from __future__ import annotations + +import re +from functools import lru_cache + +RE_HAS_JINJA = re.compile(r"{[{%#].*[%#}]}", re.DOTALL) +RE_HAS_GLOB = re.compile("[][*?]") + + +def strip_ansi_escape(data: str | bytes) -> str: + """Remove all ANSI escapes from string or bytes. + + If bytes is passed instead of string, it will be converted to string + using UTF-8. 
+ """ + if isinstance(data, bytes): # pragma: no branch + data = data.decode("utf-8") + + return re.sub(r"\x1b[^m]*m", "", data) + + +def toidentifier(text: str) -> str: + """Convert unsafe chars to ones allowed in variables.""" + result = re.sub(r"[\s-]+", "_", text) + if not result.isidentifier(): + raise RuntimeError( + f"Unable to convert role name '{text}' to valid variable name." + ) + return result + + +# https://www.python.org/dev/peps/pep-0616/ +def removeprefix(self: str, prefix: str) -> str: + """Remove prefix from string.""" + if self.startswith(prefix): + return self[len(prefix) :] + return self[:] + + +@lru_cache(maxsize=None) +def has_jinja(value: str) -> bool: + """Return true if a string seems to contain jinja templating.""" + return bool(isinstance(value, str) and RE_HAS_JINJA.search(value)) + + +@lru_cache(maxsize=None) +def has_glob(value: str) -> bool: + """Return true if a string looks like having a glob pattern.""" + return bool(isinstance(value, str) and RE_HAS_GLOB.search(value)) diff --git a/src/ansiblelint/transformer.py b/src/ansiblelint/transformer.py new file mode 100644 index 0000000..097c7ca --- /dev/null +++ b/src/ansiblelint/transformer.py @@ -0,0 +1,141 @@ +"""Transformer implementation.""" +from __future__ import annotations + +import logging +from argparse import Namespace +from typing import Union, cast + +from ruamel.yaml.comments import CommentedMap, CommentedSeq + +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule, TransformMixin +from ansiblelint.runner import LintResult +from ansiblelint.yaml_utils import FormattedYAML, get_path_to_play, get_path_to_task + +__all__ = ["Transformer"] + +_logger = logging.getLogger(__name__) + + +# pylint: disable=too-few-public-methods +class Transformer: + """Transformer class marshals transformations. + + The Transformer is similar to the ``ansiblelint.runner.Runner`` which manages + running each of the rules. We only expect there to be one ``Transformer`` instance + which should be instantiated from the main entrypoint function. + + In the future, the transformer will be responsible for running transforms for each + of the rule matches. For now, it just reads/writes YAML files which is a + pre-requisite for the planned rule-specific transforms. + """ + + def __init__(self, result: LintResult, options: Namespace): + """Initialize a Transformer instance.""" + self.write_set = self.effective_write_set(options.write_list) + + self.matches: list[MatchError] = result.matches + self.files: set[Lintable] = result.files + + lintables: dict[str, Lintable] = {file.filename: file for file in result.files} + self.matches_per_file: dict[Lintable, list[MatchError]] = { + file: [] for file in result.files + } + + for match in self.matches: + try: + lintable = lintables[match.filename] + except KeyError: + # we shouldn't get here, but this is easy to recover from so do that. + lintable = Lintable(match.filename) + self.matches_per_file[lintable] = [] + self.matches_per_file[lintable].append(match) + + @staticmethod + def effective_write_set(write_list: list[str]) -> set[str]: + """Simplify write_list based on ``"none"`` and ``"all"`` keywords. + + ``"none"`` resets the enabled rule transforms. + This returns ``{"none"}`` or a set of everything after the last ``"none"``. + + If ``"all"`` is in the ``write_list`` (after ``"none"`` if present), + then this will return ``{"all"}``. 
+ """ + none_indexes = [i for i, value in enumerate(write_list) if value == "none"] + if none_indexes: + index = none_indexes[-1] + if len(write_list) > index + 1: + index += 1 + write_list = write_list[index:] + if "all" in write_list: + return {"all"} + return set(write_list) + + def run(self) -> None: + """For each file, read it, execute transforms on it, then write it.""" + for file, matches in self.matches_per_file.items(): + # str() convinces mypy that "text/yaml" is a valid Literal. + # Otherwise, it thinks base_kind is one of playbook, meta, tasks, ... + file_is_yaml = str(file.base_kind) == "text/yaml" + + try: + data: str = file.content + except (UnicodeDecodeError, IsADirectoryError): + # we hit a binary file (eg a jar or tar.gz) or a directory + data = "" + file_is_yaml = False + + ruamel_data: CommentedMap | CommentedSeq | None = None + if file_is_yaml: + # We need a fresh YAML() instance for each load because ruamel.yaml + # stores intermediate state during load which could affect loading + # any other files. (Based on suggestion from ruamel.yaml author) + yaml = FormattedYAML() + + ruamel_data = yaml.loads(data) + if not isinstance(ruamel_data, (CommentedMap, CommentedSeq)): + # This is an empty vars file or similar which loads as None. + # It is not safe to write this file or data-loss is likely. + # Only maps and sequences can preserve comments. Skip it. + continue + + if self.write_set != {"none"}: + self._do_transforms(file, ruamel_data or data, file_is_yaml, matches) + + if file_is_yaml: + # noinspection PyUnboundLocalVariable + file.content = yaml.dumps(ruamel_data) + + if file.updated: + file.write() + + def _do_transforms( + self, + file: Lintable, + data: CommentedMap | CommentedSeq | str, + file_is_yaml: bool, + matches: list[MatchError], + ) -> None: + """Do Rule-Transforms handling any last-minute MatchError inspections.""" + for match in sorted(matches): + if not isinstance(match.rule, TransformMixin): + continue + if self.write_set != {"all"}: + rule = cast(AnsibleLintRule, match.rule) + rule_definition = set(rule.tags) + rule_definition.add(rule.id) + if rule_definition.isdisjoint(self.write_set): + # rule transform not requested. Skip it. + continue + if file_is_yaml and not match.yaml_path: + data = cast(Union[CommentedMap, CommentedSeq], data) + if match.match_type == "play": + match.yaml_path = get_path_to_play(file, match.linenumber, data) + elif match.task or file.kind in ( + "tasks", + "handlers", + "playbook", + ): + match.yaml_path = get_path_to_task(file, match.linenumber, data) + match.rule.transform(match, file, data) diff --git a/src/ansiblelint/utils.py b/src/ansiblelint/utils.py new file mode 100644 index 0000000..3e71e91 --- /dev/null +++ b/src/ansiblelint/utils.py @@ -0,0 +1,914 @@ +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# spell-checker:ignore dwim +"""Generic utility helpers.""" +from __future__ import annotations + +import contextlib +import inspect +import logging +import os +import re +import warnings +from argparse import Namespace +from collections.abc import ItemsView, Mapping +from functools import lru_cache +from pathlib import Path +from typing import Any, Callable, Generator, Sequence + +import yaml +from ansible.errors import AnsibleError, AnsibleParserError +from ansible.module_utils.parsing.convert_bool import boolean +from ansible.parsing.dataloader import DataLoader +from ansible.parsing.mod_args import ModuleArgsParser +from ansible.parsing.splitter import split_args +from ansible.parsing.yaml.constructor import AnsibleConstructor, AnsibleMapping +from ansible.parsing.yaml.loader import AnsibleLoader +from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleSequence +from ansible.plugins.loader import add_all_plugin_dirs +from ansible.template import Templar +from ansible.utils.collection_loader import AnsibleCollectionConfig +from yaml.composer import Composer +from yaml.representer import RepresenterError + +from ansiblelint._internal.rules import ( + AnsibleParserErrorRule, + LoadingFailureRule, + RuntimeErrorRule, +) +from ansiblelint.app import get_app +from ansiblelint.config import options +from ansiblelint.constants import ( + FILENAME_KEY, + INCLUSION_ACTION_NAMES, + LINE_NUMBER_KEY, + NESTED_TASK_KEYS, + PLAYBOOK_TASK_KEYWORDS, + ROLE_IMPORT_ACTION_NAMES, + SKIPPED_RULES_KEY, + FileType, +) +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable, discover_lintables +from ansiblelint.skip_utils import is_nested_task +from ansiblelint.text import removeprefix + +# ansible-lint doesn't need/want to know about encrypted secrets, so we pass a +# string as the password to enable such yaml files to be opened and parsed +# successfully. +DEFAULT_VAULT_PASSWORD = "x" +COLLECTION_PLAY_RE = re.compile(r"^[\w\d_]+\.[\w\d_]+\.[\w\d_]+$") + +PLAYBOOK_DIR = os.environ.get("ANSIBLE_PLAYBOOK_DIR", None) + + +_logger = logging.getLogger(__name__) + + +def parse_yaml_from_file(filepath: str) -> AnsibleBaseYAMLObject: + """Extract a decrypted YAML object from file.""" + dataloader = DataLoader() + if hasattr(dataloader, "set_vault_password"): + dataloader.set_vault_password(DEFAULT_VAULT_PASSWORD) + return dataloader.load_from_file(filepath) + + +def path_dwim(basedir: str, given: str) -> str: + """Convert a given path do-what-I-mean style.""" + dataloader = DataLoader() + dataloader.set_basedir(basedir) + return str(dataloader.path_dwim(given)) + + +def ansible_templar(basedir: str, templatevars: Any) -> Templar: + """Create an Ansible Templar using templatevars.""" + # `basedir` is the directory containing the lintable file. + # Therefore, for tasks in a role, `basedir` has the form + # `roles/some_role/tasks`. On the other hand, the search path + # is `roles/some_role/{files,templates}`. As a result, the + # `tasks` part in the basedir should be stripped stripped. 
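# For example, a basedir of "roles/some_role/tasks" is reduced to "roles/some_role" below,
# so relative lookups resolve against roles/some_role/files and roles/some_role/templates.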
+ if os.path.basename(basedir) == "tasks": + basedir = os.path.dirname(basedir) + + dataloader = DataLoader() + dataloader.set_basedir(basedir) + templar = Templar(dataloader, variables=templatevars) + return templar + + +def ansible_template( + basedir: str, varname: Any, templatevars: Any, **kwargs: Any +) -> Any: + """Render a templated string by mocking missing filters.""" + templar = ansible_templar(basedir=basedir, templatevars=templatevars) + # pylint: disable=unused-variable + for i in range(3): + try: + kwargs["disable_lookups"] = True + return templar.template(varname, **kwargs) + except AnsibleError as exc: + if ( + "was found, however lookups were disabled from templating" + in exc.message + ): + # ansible core does an early exit when disable_lookup=True but + # this happens after the jinja2 syntax already passed. + break + if ( + exc.message.startswith("template error while templating string:") + and "'" in exc.message + ): + missing_filter = exc.message.split("'")[1] + if missing_filter == "end of print statement": + raise + # Mock the filter to avoid and error from Ansible templating + # pylint: disable=protected-access + templar.environment.filters._delegatee[missing_filter] = lambda x: x + # Record the mocked filter so we can warn the user + if missing_filter not in options.mock_filters: + _logger.debug("Mocking missing filter %s", missing_filter) + options.mock_filters.append(missing_filter) + continue + raise + + +BLOCK_NAME_TO_ACTION_TYPE_MAP = { + "tasks": "task", + "handlers": "handler", + "pre_tasks": "task", + "post_tasks": "task", + "block": "meta", + "rescue": "meta", + "always": "meta", +} + + +def tokenize(line: str) -> tuple[str, list[str], dict[str, str]]: + """Parse a string task invocation.""" + tokens = line.lstrip().split(" ") + if tokens[0] == "-": + tokens = tokens[1:] + if tokens[0] == "action:" or tokens[0] == "local_action:": + tokens = tokens[1:] + command = tokens[0].replace(":", "") + + args = [] + kwargs = {} + non_kv_found = False + for arg in tokens[1:]: + if "=" in arg and not non_kv_found: + key_value = arg.split("=", 1) + kwargs[key_value[0]] = key_value[1] + else: + non_kv_found = True + args.append(arg) + return (command, args, kwargs) + + +def _playbook_items(pb_data: AnsibleBaseYAMLObject) -> ItemsView: # type: ignore + if isinstance(pb_data, dict): + return pb_data.items() + if not pb_data: + return [] # type: ignore + + # "if play" prevents failure if the play sequence contains None, + # which is weird but currently allowed by Ansible + # https://github.com/ansible/ansible-lint/issues/849 + return [item for play in pb_data if play for item in play.items()] # type: ignore + + +def _set_collections_basedir(basedir: str) -> None: + # Sets the playbook directory as playbook_paths for the collection loader + AnsibleCollectionConfig.playbook_paths = basedir + + +def find_children(lintable: Lintable) -> list[Lintable]: # noqa: C901 + """Traverse children of a single file or folder.""" + if not lintable.path.exists(): + return [] + playbook_dir = str(lintable.path.parent) + _set_collections_basedir(playbook_dir or os.path.abspath(".")) + add_all_plugin_dirs(playbook_dir or ".") + if lintable.kind == "role": + playbook_ds = AnsibleMapping({"roles": [{"role": str(lintable.path)}]}) + elif lintable.kind not in ("playbook", "tasks"): + return [] + else: + try: + playbook_ds = parse_yaml_from_file(str(lintable.path)) + except AnsibleError as exc: + raise SystemExit(exc) from exc + results = [] + basedir = os.path.dirname(str(lintable.path)) + # 
playbook_ds can be an AnsibleUnicode string, which we consider invalid + if isinstance(playbook_ds, str): + raise MatchError(filename=lintable, rule=LoadingFailureRule()) + for item in _playbook_items(playbook_ds): + # if lintable.kind not in ["playbook"]: + # continue + for child in play_children(basedir, item, lintable.kind, playbook_dir): + # We avoid processing parametrized children + path_str = str(child.path) + if "$" in path_str or "{{" in path_str: + continue + + # Repair incorrect paths obtained when old syntax was used, like: + # - include: simpletask.yml tags=nginx + valid_tokens = [] + for token in split_args(path_str): + if "=" in token: + break + valid_tokens.append(token) + path = " ".join(valid_tokens) + if path != path_str: + child.path = Path(path) + child.name = child.path.name + + results.append(child) + return results + + +def template( + basedir: str, + value: Any, + variables: Any, + fail_on_error: bool = False, + fail_on_undefined: bool = False, + **kwargs: str, +) -> Any: + """Attempt rendering a value with known vars.""" + try: + value = ansible_template( + os.path.abspath(basedir), + value, + variables, + **dict(kwargs, fail_on_undefined=fail_on_undefined), + ) + # Hack to skip the following exception when using to_json filter on a variable. + # I guess the filter doesn't like empty vars... + except (AnsibleError, ValueError, RepresenterError): + # templating failed, so just keep value as is. + if fail_on_error: + raise + return value + + +def play_children( + basedir: str, item: tuple[str, Any], parent_type: FileType, playbook_dir: str +) -> list[Lintable]: + """Flatten the traversed play tasks.""" + # pylint: disable=unused-argument + delegate_map: dict[str, Callable[[str, Any, Any, FileType], list[Lintable]]] = { + "tasks": _taskshandlers_children, + "pre_tasks": _taskshandlers_children, + "post_tasks": _taskshandlers_children, + "block": _taskshandlers_children, + "include": _include_children, + "ansible.builtin.include": _include_children, + "import_playbook": _include_children, + "ansible.builtin.import_playbook": _include_children, + "roles": _roles_children, + "dependencies": _roles_children, + "handlers": _taskshandlers_children, + "include_tasks": _include_children, + "ansible.builtin.include_tasks": _include_children, + "import_tasks": _include_children, + "ansible.builtin.import_tasks": _include_children, + } + (k, v) = item + add_all_plugin_dirs(os.path.abspath(basedir)) + + if k in delegate_map: + if v: + v = template( + os.path.abspath(basedir), + v, + {"playbook_dir": PLAYBOOK_DIR or os.path.abspath(basedir)}, + fail_on_undefined=False, + ) + return delegate_map[k](basedir, k, v, parent_type) + return [] + + +def _include_children( + basedir: str, k: str, v: Any, parent_type: FileType +) -> list[Lintable]: + # handle special case include_tasks: name=filename.yml + if k in INCLUSION_ACTION_NAMES and isinstance(v, dict) and "file" in v: + v = v["file"] + + # we cannot really parse any jinja2 in includes, so we ignore them + if not v or "{{" in v: + return [] + + if "import_playbook" in k and COLLECTION_PLAY_RE.match(v): + # Any import_playbooks from collections should be ignored as ansible + # own syntax check will handle them. 
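# For example, "import_playbook: some_ns.some_col.site" matches COLLECTION_PLAY_RE
# (three dot-separated identifiers), so no child Lintable is produced for it here.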
+ return [] + + # handle include: filename.yml tags=blah + # pylint: disable=unused-variable + (command, args, kwargs) = tokenize(f"{k}: {v}") + + result = path_dwim(basedir, args[0]) + while basedir not in ["", "/"]: + if os.path.exists(result): + break + basedir = os.path.dirname(basedir) + result = path_dwim(basedir, args[0]) + + return [Lintable(result, kind=parent_type)] + + +def _taskshandlers_children( + basedir: str, k: str, v: None | Any, parent_type: FileType +) -> list[Lintable]: + results: list[Lintable] = [] + if v is None: + raise MatchError( + message="A malformed block was encountered while loading a block.", + rule=RuntimeErrorRule(), + ) + for task_handler in v: + # ignore empty tasks, `-` + if not task_handler: + continue + + with contextlib.suppress(LookupError): + children = _get_task_handler_children_for_tasks_or_playbooks( + task_handler, + basedir, + k, + parent_type, + ) + results.append(children) + continue + + if any(x in task_handler for x in ROLE_IMPORT_ACTION_NAMES): + # lgtm [py/unreachable-statement] + task_handler = normalize_task_v2(task_handler) + _validate_task_handler_action_for_role(task_handler["action"]) + results.extend( + _roles_children( + basedir, + k, + [task_handler["action"].get("name")], + parent_type, + main=task_handler["action"].get("tasks_from", "main"), + ) + ) + continue + + if "block" not in task_handler: + continue + + results.extend( + _taskshandlers_children(basedir, k, task_handler["block"], parent_type) + ) + if "rescue" in task_handler: + results.extend( + _taskshandlers_children(basedir, k, task_handler["rescue"], parent_type) + ) + if "always" in task_handler: + results.extend( + _taskshandlers_children(basedir, k, task_handler["always"], parent_type) + ) + + return results + + +def _get_task_handler_children_for_tasks_or_playbooks( + task_handler: dict[str, Any], + basedir: str, + k: Any, + parent_type: FileType, +) -> Lintable: + """Try to get children of taskhandler for include/import tasks/playbooks.""" + child_type = k if parent_type == "playbook" else parent_type + + # Include the FQCN task names as this happens before normalize + for task_handler_key in INCLUSION_ACTION_NAMES: + with contextlib.suppress(KeyError): + # ignore empty tasks + if not task_handler: # pragma: no branch + continue + + file_name = task_handler[task_handler_key] + if isinstance(file_name, Mapping) and file_name.get("file", None): + file_name = file_name["file"] + + f = path_dwim(basedir, file_name) + while basedir not in ["", "/"]: + if os.path.exists(f): + break + basedir = os.path.dirname(basedir) + f = path_dwim(basedir, file_name) + return Lintable(f, kind=child_type) + + raise LookupError( + f'The node contains none of: {", ".join(sorted(INCLUSION_ACTION_NAMES))}', + ) + + +def _validate_task_handler_action_for_role(th_action: dict[str, Any]) -> None: + """Verify that the task handler action is valid for role include.""" + module = th_action["__ansible_module__"] + + if "name" not in th_action: + raise MatchError(message=f"Failed to find required 'name' key in {module!s}") + + if not isinstance(th_action["name"], str): + raise MatchError( + message=f"Value assigned to 'name' key on '{module!s}' is not a string.", + ) + + +def _roles_children( + basedir: str, k: str, v: Sequence[Any], parent_type: FileType, main: str = "main" +) -> list[Lintable]: + # pylint: disable=unused-argument # parent_type) + results: list[Lintable] = [] + if not v: + # typing does not prevent junk from being passed in + return results + for role in v: + if 
isinstance(role, dict): + if "role" in role or "name" in role: + if "tags" not in role or "skip_ansible_lint" not in role["tags"]: + results.extend( + _look_for_role_files( + basedir, role.get("role", role.get("name")), main=main + ) + ) + elif k != "dependencies": + raise SystemExit( + f'role dict {role} does not contain a "role" or "name" key' + ) + else: + results.extend(_look_for_role_files(basedir, role, main=main)) + return results + + +def _rolepath(basedir: str, role: str) -> str | None: + role_path = None + + possible_paths = [ + # if included from a playbook + path_dwim(basedir, os.path.join("roles", role)), + path_dwim(basedir, role), + # if included from roles/[role]/meta/main.yml + path_dwim(basedir, os.path.join("..", "..", "..", "roles", role)), + path_dwim(basedir, os.path.join("..", "..", role)), + # if checking a role in the current directory + path_dwim(basedir, os.path.join("..", role)), + ] + + for loc in get_app().runtime.config.default_roles_path: + loc = os.path.expanduser(loc) + possible_paths.append(path_dwim(loc, role)) + + possible_paths.append(path_dwim(basedir, "")) + + for path_option in possible_paths: # pragma: no branch + if os.path.isdir(path_option): + role_path = path_option + break + + if role_path: # pragma: no branch + add_all_plugin_dirs(role_path) + + return role_path + + +def _look_for_role_files( + basedir: str, role: str, main: str | None = "main" +) -> list[Lintable]: + # pylint: disable=unused-argument # main + role_path = _rolepath(basedir, role) + if not role_path: # pragma: no branch + return [] + + results = [] + + for kind in ["tasks", "meta", "handlers", "vars", "defaults"]: + current_path = os.path.join(role_path, kind) + for folder, _, files in os.walk(current_path): + for file in files: + file_ignorecase = file.lower() + if file_ignorecase.endswith((".yml", ".yaml")): + results.append(Lintable(os.path.join(folder, file))) + + return results + + +def _kv_to_dict(v: str) -> dict[str, Any]: + (command, args, kwargs) = tokenize(v) + return {"__ansible_module__": command, "__ansible_arguments__": args, **kwargs} + + +def _sanitize_task(task: dict[str, Any]) -> dict[str, Any]: + """Return a stripped-off task structure compatible with new Ansible. + + This helper takes a copy of the incoming task and drops + any internally used keys from it. + """ + result = task.copy() + # task is an AnsibleMapping which inherits from OrderedDict, so we need + # to use `del` to remove unwanted keys. 
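# Illustration: {"name": "x", "__file__": "site.yml", "__line__": 3} becomes {"name": "x"};
# copying first keeps the caller's original task mapping untouched.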
+ for k in [SKIPPED_RULES_KEY, FILENAME_KEY, LINE_NUMBER_KEY]: + if k in result: + del result[k] + return result + + +def _extract_ansible_parsed_keys_from_task( + result: dict[str, Any], + task: dict[str, Any], + keys: tuple[str, ...], +) -> dict[str, Any]: + """Return a dict with existing key in task.""" + for k, v in list(task.items()): + if k in keys: + # we don't want to re-assign these values, which were + # determined by the ModuleArgsParser() above + continue + result[k] = v + return result + + +def normalize_task_v2(task: dict[str, Any]) -> dict[str, Any]: + """Ensure tasks have a normalized action key and strings are converted to python objects.""" + result: dict[str, Any] = {} + ansible_parsed_keys = ("action", "local_action", "args", "delegate_to") + + if is_nested_task(task): + _extract_ansible_parsed_keys_from_task(result, task, ansible_parsed_keys) + # Add dummy action for block/always/rescue statements + result["action"] = { + "__ansible_module__": "block/always/rescue", + "__ansible_module_original__": "block/always/rescue", + } + + return result + + sanitized_task = _sanitize_task(task) + mod_arg_parser = ModuleArgsParser(sanitized_task) + + try: + action, arguments, result["delegate_to"] = mod_arg_parser.parse( + skip_action_validation=options.skip_action_validation + ) + except AnsibleParserError as exc: + # pylint: disable=raise-missing-from + raise MatchError( + rule=AnsibleParserErrorRule(), + message=exc.message, + filename=task.get(FILENAME_KEY, "Unknown"), + linenumber=task.get(LINE_NUMBER_KEY, 0), + ) + + # denormalize shell -> command conversion + if "_uses_shell" in arguments: + action = "shell" + del arguments["_uses_shell"] + + _extract_ansible_parsed_keys_from_task( + result, task, ansible_parsed_keys + (action,) + ) + + if not isinstance(action, str): + raise RuntimeError(f"Task actions can only be strings, got {action}") + action_unnormalized = action + # convert builtin fqn calls to short forms because most rules know only + # about short calls but in the future we may switch the normalization to do + # the opposite. Mainly we currently consider normalized the module listing + # used by `ansible-doc -t module -l 2>/dev/null` + action = removeprefix(action, "ansible.builtin.") + result["action"] = { + "__ansible_module__": action, + "__ansible_module_original__": action_unnormalized, + } + + if "_raw_params" in arguments: + # Doing a split here is really bad as it would break jinja2 templating + # parsing of the template must happen before any kind of split. 
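# For example, ``shell: echo {{ greeting | default("hi") }}`` keeps the whole raw string as a
# single argument below; splitting on whitespace would cut the Jinja2 expression apart.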
+ result["action"]["__ansible_arguments__"] = [arguments["_raw_params"]] + del arguments["_raw_params"] + else: + result["action"]["__ansible_arguments__"] = [] + + if "argv" in arguments and not result["action"]["__ansible_arguments__"]: + result["action"]["__ansible_arguments__"] = arguments["argv"] + del arguments["argv"] + + result["action"].update(arguments) + return result + + +def normalize_task(task: dict[str, Any], filename: str) -> dict[str, Any]: + """Unify task-like object structures.""" + ansible_action_type = task.get("__ansible_action_type__", "task") + if "__ansible_action_type__" in task: + del task["__ansible_action_type__"] + task = normalize_task_v2(task) + task[FILENAME_KEY] = filename + task["__ansible_action_type__"] = ansible_action_type + return task + + +def task_to_str(task: dict[str, Any]) -> str: + """Make a string identifier for the given task.""" + name = task.get("name") + if name: + return str(name) + action = task.get("action") + if isinstance(action, str) or not isinstance(action, dict): + return str(action) + args = [ + f"{k}={v}" + for (k, v) in action.items() + if k + not in [ + "__ansible_module__", + "__ansible_module_original__", + "__ansible_arguments__", + LINE_NUMBER_KEY, + FILENAME_KEY, + ] + ] + + for item in action.get("__ansible_arguments__", []): + args.append(str(item)) + + return f"{action['__ansible_module__']} {' '.join(args)}" + + +def extract_from_list( + blocks: AnsibleBaseYAMLObject, candidates: list[str], recursive: bool = False +) -> list[Any]: + """Get action tasks from block structures.""" + results = [] + for block in blocks: + for candidate in candidates: + if isinstance(block, dict) and candidate in block: + if isinstance(block[candidate], list): + subresults = add_action_type(block[candidate], candidate) + if recursive: + subresults.extend( + extract_from_list(subresults, candidates, recursive) + ) + results.extend(subresults) + elif block[candidate] is not None: + raise RuntimeError( + f"Key '{candidate}' defined, but bad value: '{str(block[candidate])}'" + ) + return results + + +def add_action_type(actions: AnsibleBaseYAMLObject, action_type: str) -> list[Any]: + """Add action markers to task objects.""" + results = [] + for action in actions: + # ignore empty task + if not action: + continue + action["__ansible_action_type__"] = BLOCK_NAME_TO_ACTION_TYPE_MAP[action_type] + results.append(action) + return results + + +def get_action_tasks(data: AnsibleBaseYAMLObject, file: Lintable) -> list[Any]: + """Get a flattened list of action tasks from the file.""" + tasks = [] + if file.kind in ["tasks", "handlers"]: + tasks = add_action_type(data, file.kind) + else: + tasks.extend(extract_from_list(data, PLAYBOOK_TASK_KEYWORDS)) + + # Add sub-elements of block/rescue/always to tasks list + tasks.extend(extract_from_list(tasks, NESTED_TASK_KEYS, recursive=True)) + + return tasks + + +@lru_cache(maxsize=None) +def parse_yaml_linenumbers( # noqa: max-complexity: 12 + lintable: Lintable, +) -> AnsibleBaseYAMLObject: + """Parse yaml as ansible.utils.parse_yaml but with linenumbers. + + The line numbers are stored in each node's LINE_NUMBER_KEY key. 
+ """ + result = [] + + def compose_node(parent: yaml.nodes.Node, index: int) -> yaml.nodes.Node: + # the line number where the previous token has ended (plus empty lines) + line = loader.line + node = Composer.compose_node(loader, parent, index) + if not isinstance(node, yaml.nodes.Node): + raise RuntimeError("Unexpected yaml data.") + setattr(node, "__line__", line + 1) + return node + + def construct_mapping( + node: AnsibleBaseYAMLObject, deep: bool = False + ) -> AnsibleMapping: + mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep) + if hasattr(node, "__line__"): + mapping[LINE_NUMBER_KEY] = node.__line__ + else: + mapping[ + LINE_NUMBER_KEY + ] = mapping._line_number # pylint: disable=protected-access + mapping[FILENAME_KEY] = lintable.path + return mapping + + try: + kwargs = {} + if "vault_password" in inspect.getfullargspec(AnsibleLoader.__init__).args: + kwargs["vault_password"] = DEFAULT_VAULT_PASSWORD + loader = AnsibleLoader(lintable.content, **kwargs) + loader.compose_node = compose_node + loader.construct_mapping = construct_mapping + # while Ansible only accepts single documents, we also need to load + # multi-documents, as we attempt to load any YAML file, not only + # Ansible managed ones. + while True: + data = loader.get_data() + if data is None: + break + result.append(data) + except ( + yaml.parser.ParserError, + yaml.scanner.ScannerError, + yaml.constructor.ConstructorError, + ) as exc: + raise RuntimeError("Failed to load YAML file") from exc + + if len(result) == 0: + return None # empty documents + if len(result) == 1: + return result[0] + return result + + +def get_first_cmd_arg(task: dict[str, Any]) -> Any: + """Extract the first arg from a cmd task.""" + try: + if "cmd" in task["action"]: + first_cmd_arg = task["action"]["cmd"].split()[0] + else: + first_cmd_arg = task["action"]["__ansible_arguments__"][0].split()[0] + except IndexError: + return None + return first_cmd_arg + + +def get_second_cmd_arg(task: dict[str, Any]) -> Any: + """Extract the second arg from a cmd task.""" + try: + if "cmd" in task["action"]: + second_cmd_arg = task["action"]["cmd"].split()[1] + else: + second_cmd_arg = task["action"]["__ansible_arguments__"][0].split()[1] + except IndexError: + return None + return second_cmd_arg + + +def is_playbook(filename: str) -> bool: + """ + Check if the file is a playbook. + + Given a filename, it should return true if it looks like a playbook. The + function is not supposed to raise exceptions. 
+ """ + # we assume is a playbook if we loaded a sequence of dictionaries where + # at least one of these keys is present: + playbooks_keys = { + "gather_facts", + "hosts", + "import_playbook", + "post_tasks", + "pre_tasks", + "roles", + "tasks", + } + + # makes it work with Path objects by converting them to strings + if not isinstance(filename, str): + filename = str(filename) + + try: + f = parse_yaml_from_file(filename) + except Exception as exc: # pylint: disable=broad-except + _logger.warning( + "Failed to load %s with %s, assuming is not a playbook.", filename, exc + ) + else: + if ( + isinstance(f, AnsibleSequence) + and hasattr(next(iter(f), {}), "keys") + and playbooks_keys.intersection(next(iter(f), {}).keys()) + ): + return True + return False + + +# pylint: disable=too-many-statements +def get_lintables( + opts: Namespace = Namespace(), args: list[str] | None = None +) -> list[Lintable]: + """Detect files and directories that are lintable.""" + lintables: list[Lintable] = [] + + # passing args bypass auto-detection mode + if args: + for arg in args: + lintable = Lintable(arg) + lintables.append(lintable) + else: + for filename in discover_lintables(opts): + path = Path(filename) + # skip exclusions + try: + for file_path in opts.exclude_paths: + if str(path.resolve()).startswith(str(file_path)): + raise FileNotFoundError( + f"File {file_path} matched exclusion entry: {path}" + ) + except FileNotFoundError as exc: + _logger.debug("Ignored %s due to: %s", path, exc) + continue + + if path.is_symlink() and not path.exists(): + _logger.warning("Ignored broken symlink %s -> %s", path, path.resolve()) + continue + + lintables.append(Lintable(path)) + + # stage 2: guess roles from current lintables, as there is no unique + # file that must be present in any kind of role. + _extend_with_roles(lintables) + + return lintables + + +def _extend_with_roles(lintables: list[Lintable]) -> None: + """Detect roles among lintables and adds them to the list.""" + for lintable in lintables: + parts = lintable.path.parent.parts + if "roles" in parts: + role = lintable.path + while role.parent.name != "roles" and role.name: + role = role.parent + if role.exists() and not role.is_file(): + lintable = Lintable(role, kind="role") + if lintable not in lintables: + _logger.debug("Added role: %s", lintable) + lintables.append(lintable) + + +def convert_to_boolean(value: Any) -> bool: + """Use Ansible to convert something to a boolean.""" + return bool(boolean(value)) + + +def nested_items( + data: dict[Any, Any] | list[Any], parent: str = "" +) -> Generator[tuple[Any, Any, str], None, None]: + """Iterate a nested data structure.""" + warnings.warn( + "Call to deprecated function ansiblelint.utils.nested_items. 
" + "Use ansiblelint.yaml_utils.nested_items_path instead.", + category=DeprecationWarning, + stacklevel=2, + ) + if isinstance(data, dict): + for k, v in data.items(): + yield k, v, parent + # pylint: disable=redefined-outer-name + for k, v, returned_parent in nested_items(v, k): + yield k, v, returned_parent + if isinstance(data, list): + for item in data: + yield "list-item", item, parent + for k, v, returned_parent in nested_items(item): + yield k, v, returned_parent diff --git a/src/ansiblelint/version.py b/src/ansiblelint/version.py new file mode 100644 index 0000000..4967ced --- /dev/null +++ b/src/ansiblelint/version.py @@ -0,0 +1,14 @@ +"""Ansible-lint version information.""" +try: + from ._version import version as __version__ +except ImportError: # pragma: no cover + try: + import pkg_resources + + __version__ = pkg_resources.get_distribution("ansible-lint").version + except Exception: # pylint: disable=broad-except + # this is the fallback SemVer version picked by setuptools_scm when tag + # information is not available. + __version__ = "0.1.dev1" + +__all__ = ("__version__",) diff --git a/src/ansiblelint/yaml_utils.py b/src/ansiblelint/yaml_utils.py new file mode 100644 index 0000000..ca8a2e1 --- /dev/null +++ b/src/ansiblelint/yaml_utils.py @@ -0,0 +1,1146 @@ +"""Utility helpers to simplify working with yaml-based data.""" +# pylint: disable=too-many-lines +from __future__ import annotations + +import functools +import logging +import os +import re +from io import StringIO +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterator, + Pattern, + Sequence, + Tuple, + Union, + cast, +) + +import ruamel.yaml.events +from ruamel.yaml.comments import CommentedMap, CommentedSeq, Format +from ruamel.yaml.constructor import RoundTripConstructor +from ruamel.yaml.emitter import Emitter, ScalarAnalysis + +# Module 'ruamel.yaml' does not explicitly export attribute 'YAML'; implicit reexport disabled +# To make the type checkers happy, we import from ruamel.yaml.main instead. +from ruamel.yaml.main import YAML +from ruamel.yaml.nodes import ScalarNode +from ruamel.yaml.representer import RoundTripRepresenter +from ruamel.yaml.scalarint import ScalarInt +from ruamel.yaml.tokens import CommentToken +from yamllint.config import YamlLintConfig + +from ansiblelint.constants import ( + ANNOTATION_KEYS, + NESTED_TASK_KEYS, + PLAYBOOK_TASK_KEYWORDS, + SKIPPED_RULES_KEY, +) +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.utils import get_action_tasks, normalize_task + +if TYPE_CHECKING: + # noinspection PyProtectedMember + from ruamel.yaml.comments import LineCol # pylint: disable=ungrouped-imports + +_logger = logging.getLogger(__name__) + +YAMLLINT_CONFIG = """ +extends: default +rules: + comments: + # https://github.com/prettier/prettier/issues/6780 + min-spaces-from-content: 1 + # https://github.com/adrienverge/yamllint/issues/384 + comments-indentation: false + document-start: disable + # 160 chars was the default used by old E204 rule, but + # you can easily change it or disable in your .yamllint file. + line-length: + max: 160 + # We are adding an extra space inside braces as that's how prettier does it + # and we are trying not to fight other linters. 
+ braces: + min-spaces-inside: 0 # yamllint defaults to 0 + max-spaces-inside: 1 # yamllint defaults to 0 + octal-values: + forbid-implicit-octal: true # yamllint defaults to false + forbid-explicit-octal: true # yamllint defaults to false +""" + + +def deannotate(data: Any) -> Any: + """Remove our annotations like __file__ and __line__ and return a JSON serializable object.""" + if isinstance(data, dict): + result = data.copy() + for key, value in data.items(): + if key in ANNOTATION_KEYS: + del result[key] + else: + result[key] = deannotate(value) + return result + if isinstance(data, list): + return [deannotate(item) for item in data if item not in ANNOTATION_KEYS] + return data + + +@functools.lru_cache(maxsize=1) +def load_yamllint_config() -> YamlLintConfig: + """Load our default yamllint config and any customized override file.""" + config = YamlLintConfig(content=YAMLLINT_CONFIG) + # if we detect local yamllint config we use it but raise a warning + # as this is likely to get out of sync with our internal config. + for file in [ + ".yamllint", + ".yamllint.yaml", + ".yamllint.yml", + os.getenv("YAMLLINT_CONFIG_FILE", ""), + os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config")) + + "/yamllint/config", + ]: + if os.path.isfile(file): + _logger.debug( + "Loading custom %s config file, this extends our " + "internal yamllint config.", + file, + ) + config_override = YamlLintConfig(file=file) + config_override.extend(config) + config = config_override + break + _logger.debug("Effective yamllint rules used: %s", config.rules) + return config + + +def iter_tasks_in_file( + lintable: Lintable, +) -> Iterator[tuple[dict[str, Any], dict[str, Any], list[str], MatchError | None]]: + """Iterate over tasks in file. + + This yields a 4-tuple of raw_task, normalized_task, skip_tags, and error. + + raw_task: + When looping through the tasks in the file, each "raw_task" is minimally + processed to include these special keys: __line__, __file__, skipped_rules. + normalized_task: + When each raw_task is "normalized", action shorthand (strings) get parsed + by ansible into python objects and the action key gets normalized. If the task + should be skipped (skipped is True) or normalizing it fails (error is not None) + then this is just the raw_task instead of a normalized copy. + skip_tags: + List of tags found to be skipped, from tags block or noqa comments + error: + This is normally None. It will be a MatchError when the raw_task cannot be + normalized due to an AnsibleParserError. 
+ + :param lintable: The playbook or tasks/handlers yaml file to get tasks from + + Yields raw_task, normalized_task, skipped, error + """ + data = lintable.data + if not data: + return + + raw_tasks = get_action_tasks(data, lintable) + + for raw_task in raw_tasks: + err: MatchError | None = None + + skip_tags: list[str] = raw_task.get(SKIPPED_RULES_KEY, []) + + try: + normalized_task = normalize_task(raw_task, str(lintable.path)) + except MatchError as err: + # normalize_task converts AnsibleParserError to MatchError + yield raw_task, raw_task, skip_tags, err + return + + if "skip_ansible_lint" in raw_task.get("tags", []): + skip_tags.append("skip_ansible_lint") + if skip_tags: + yield raw_task, normalized_task, skip_tags, err + continue + + yield raw_task, normalized_task, skip_tags, err + + +def nested_items_path( + data_collection: dict[Any, Any] | list[Any], + ignored_keys: Sequence[str] = (), +) -> Iterator[tuple[Any, Any, list[str | int]]]: + """Iterate a nested data structure, yielding key/index, value, and parent_path. + + This is a recursive function that calls itself for each nested layer of data. + Each iteration yields: + + 1. the current item's dictionary key or list index, + 2. the current item's value, and + 3. the path to the current item from the outermost data structure. + + For dicts, the yielded (1) key and (2) value are what ``dict.items()`` yields. + For lists, the yielded (1) index and (2) value are what ``enumerate()`` yields. + The final component, the parent path, is a list of dict keys and list indexes. + The parent path can be helpful in providing error messages that indicate + precisely which part of a yaml file (or other data structure) needs to be fixed. + + For example, given this playbook: + + .. code-block:: yaml + + - name: A play + tasks: + - name: A task + debug: + msg: foobar + + Here's the first and last yielded items: + + .. code-block:: python + + >>> playbook=[{"name": "a play", "tasks": [{"name": "a task", "debug": {"msg": "foobar"}}]}] + >>> next( nested_items_path( playbook ) ) + (0, {'name': 'a play', 'tasks': [{'name': 'a task', 'debug': {'msg': 'foobar'}}]}, []) + >>> list( nested_items_path( playbook ) )[-1] + ('msg', 'foobar', [0, 'tasks', 0, 'debug']) + + Note that, for outermost data structure, the parent path is ``[]`` because + you do not need to descend into any nested dicts or lists to find the indicated + key and value. + + If a rule were designed to prohibit "foobar" debug messages, it could use the + parent path to provide a path to the problematic ``msg``. It might use a jq-style + path in its error message: "the error is at ``.[0].tasks[0].debug.msg``". + Or if a utility could automatically fix issues, it could use the path to descend + to the parent object using something like this: + + .. code-block:: python + + target = data + for segment in parent_path: + target = target[segment] + + :param data_collection: The nested data (dicts or lists). 
+ + :returns: each iteration yields the key (of the parent dict) or the index (lists) + """ + # As typing and mypy cannot effectively ensure we are called only with + # valid data, we better ignore NoneType + if data_collection is None: + return + yield from _nested_items_path( + data_collection=data_collection, parent_path=[], ignored_keys=ignored_keys + ) + + +def _nested_items_path( + data_collection: dict[Any, Any] | list[Any], + parent_path: list[str | int], + ignored_keys: Sequence[str] = (), +) -> Iterator[tuple[Any, Any, list[str | int]]]: + """Iterate through data_collection (internal implementation of nested_items_path). + + This is a separate function because callers of nested_items_path should + not be using the parent_path param which is used in recursive _nested_items_path + calls to build up the path to the parent object of the current key/index, value. + """ + # we have to cast each convert_to_tuples assignment or mypy complains + # that both assignments (for dict and list) do not have the same type + convert_to_tuples_type = Callable[[], Iterator[Tuple[Union[str, int], Any]]] + if isinstance(data_collection, dict): + convert_data_collection_to_tuples = cast( + convert_to_tuples_type, functools.partial(data_collection.items) + ) + elif isinstance(data_collection, list): + convert_data_collection_to_tuples = cast( + convert_to_tuples_type, functools.partial(enumerate, data_collection) + ) + else: + raise TypeError( + f"Expected a dict or a list but got {data_collection!r} " + f"of type '{type(data_collection)}'" + ) + for key, value in convert_data_collection_to_tuples(): + if key in (SKIPPED_RULES_KEY, "__file__", "__line__", *ignored_keys): + continue + yield key, value, parent_path + if isinstance(value, (dict, list)): + yield from _nested_items_path( + data_collection=value, parent_path=parent_path + [key] + ) + + +def get_path_to_play( + lintable: Lintable, + line_number: int, # 1-based + ruamel_data: CommentedMap | CommentedSeq, +) -> list[str | int]: + """Get the path to the play in the given file at the given line number.""" + if line_number < 1: + raise ValueError(f"expected line_number >= 1, got {line_number}") + if lintable.kind != "playbook" or not isinstance(ruamel_data, CommentedSeq): + return [] + lc: LineCol # lc uses 0-based counts # pylint: disable=invalid-name + # line_number is 1-based. Convert to 0-based. + line_index = line_number - 1 + + prev_play_line_index = ruamel_data.lc.line + last_play_index = len(ruamel_data) + for play_index, play in enumerate(ruamel_data): + next_play_index = play_index + 1 + if last_play_index > next_play_index: + next_play_line_index = ruamel_data[next_play_index].lc.line + else: + next_play_line_index = None + + lc = play.lc # pylint: disable=invalid-name + assert isinstance(lc.line, int) + if lc.line == line_index: + return [play_index] + if play_index > 0 and prev_play_line_index < line_index < lc.line: + return [play_index - 1] + # The previous play check (above) can't catch the last play, + # so, handle the last play separately. 
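# For example, a match on a line below the final play's own first line (with no further play
# after it) is attributed to that final play by the branch below.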
+ if ( + next_play_index == last_play_index + and line_index > lc.line + and (next_play_line_index is None or line_index < next_play_line_index) + ): + # part of this (last) play + return [play_index] + prev_play_line_index = play.lc.line + return [] + + +def get_path_to_task( + lintable: Lintable, + line_number: int, # 1-based + ruamel_data: CommentedMap | CommentedSeq, +) -> list[str | int]: + """Get the path to the task in the given file at the given line number.""" + if line_number < 1: + raise ValueError(f"expected line_number >= 1, got {line_number}") + if lintable.kind in ("tasks", "handlers"): + assert isinstance(ruamel_data, CommentedSeq) + return _get_path_to_task_in_tasks_block(line_number, ruamel_data) + if lintable.kind == "playbook": + assert isinstance(ruamel_data, CommentedSeq) + return _get_path_to_task_in_playbook(line_number, ruamel_data) + # if lintable.kind in ["yaml", "requirements", "vars", "meta", "reno", "test-meta"]: + + return [] + + +def _get_path_to_task_in_playbook( + line_number: int, # 1-based + ruamel_data: CommentedSeq, +) -> list[str | int]: + """Get the path to the task in the given playbook data at the given line number.""" + last_play_index = len(ruamel_data) + for play_index, play in enumerate(ruamel_data): + next_play_index = play_index + 1 + if last_play_index > next_play_index: + next_play_line_index = ruamel_data[next_play_index].lc.line + else: + next_play_line_index = None + + play_keys = list(play.keys()) + for tasks_keyword in PLAYBOOK_TASK_KEYWORDS: + if not play.get(tasks_keyword): + continue + + try: + next_keyword = play_keys[play_keys.index(tasks_keyword) + 1] + except IndexError: + next_block_line_index = None + else: + next_block_line_index = play.lc.data[next_keyword][0] + # last_line_number_in_block is 1-based; next_*_line_index is 0-based + # next_*_line_index - 1 to get line before next_*_line_index. + # Then + 1 to make it a 1-based number. + # So, last_line_number_in_block = next_*_line_index - 1 + 1 + if next_block_line_index is not None: + last_line_number_in_block = next_block_line_index + elif next_play_line_index is not None: + last_line_number_in_block = next_play_line_index + else: + last_line_number_in_block = None + + task_path = _get_path_to_task_in_tasks_block( + line_number, play[tasks_keyword], last_line_number_in_block + ) + if task_path: + # mypy gets confused without this typehint + tasks_keyword_path: list[int | str] = [ + play_index, + tasks_keyword, + ] + return tasks_keyword_path + list(task_path) + # line_number is before first play or no tasks keywords in any of the plays + return [] + + +def _get_path_to_task_in_tasks_block( + line_number: int, # 1-based + tasks_block: CommentedSeq, + last_line_number: int | None = None, # 1-based +) -> list[str | int]: + """Get the path to the task in the given tasks block at the given line number.""" + task: CommentedMap | None + # line_number and last_line_number are 1-based. Convert to 0-based. 
+ line_index = line_number - 1 + last_line_index = None if last_line_number is None else last_line_number - 1 + + # lc (LineCol) uses 0-based counts + prev_task_line_index = tasks_block.lc.line + last_task_index = len(tasks_block) + for task_index, task in enumerate(tasks_block): + next_task_index = task_index + 1 + if last_task_index > next_task_index: + if tasks_block[next_task_index] is not None: + next_task_line_index = tasks_block[next_task_index].lc.line + else: + next_task_line_index = tasks_block.lc.item(next_task_index)[0] + else: + next_task_line_index = None + + if task is None: + # create a dummy task to represent the null task + task = CommentedMap() + task.lc.line, task.lc.col = tasks_block.lc.item(task_index) + + nested_task_keys = set(task.keys()).intersection(set(NESTED_TASK_KEYS)) + if nested_task_keys: + subtask_path = _get_path_to_task_in_nested_tasks_block( + line_number, task, nested_task_keys, next_task_line_index + ) + if subtask_path: + # mypy gets confused without this typehint + task_path: list[str | int] = [task_index] + return task_path + list(subtask_path) + + assert isinstance(task.lc.line, int) + if task.lc.line == line_index: + return [task_index] + if task_index > 0 and prev_task_line_index < line_index < task.lc.line: + return [task_index - 1] + # The previous task check can't catch the last task, + # so, handle the last task separately (also after subtask checks). + # pylint: disable=too-many-boolean-expressions + if ( + next_task_index == last_task_index + and line_index > task.lc.line + and (next_task_line_index is None or line_index < next_task_line_index) + and (last_line_index is None or line_index <= last_line_index) + ): + # part of this (last) task + return [task_index] + prev_task_line_index = task.lc.line + # line is not part of this tasks block + return [] + + +def _get_path_to_task_in_nested_tasks_block( + line_number: int, # 1-based + task: CommentedMap, + nested_task_keys: set[str], + next_task_line_index: int | None = None, # 0-based +) -> list[str | int]: + """Get the path to the task in the given nested tasks block.""" + # loop through the keys in line order + task_keys = list(task.keys()) + task_keys_by_index = dict(enumerate(task_keys)) + for task_index, task_key in enumerate(task_keys): + nested_task_block = task[task_key] + if task_key not in nested_task_keys or not nested_task_block: + continue + next_task_key = task_keys_by_index.get(task_index + 1, None) + if next_task_key is not None: + next_task_key_line_index = task.lc.data[next_task_key][0] + else: + next_task_key_line_index = None + # last_line_number_in_block is 1-based; next_*_line_index is 0-based + # next_*_line_index - 1 to get line before next_*_line_index. + # Then + 1 to make it a 1-based number. 
+ # So, last_line_number_in_block = next_*_line_index - 1 + 1 + last_line_number_in_block = ( + next_task_key_line_index + if next_task_key_line_index is not None + else next_task_line_index + ) + subtask_path = _get_path_to_task_in_tasks_block( + line_number, + nested_task_block, + last_line_number_in_block, # 1-based + ) + if subtask_path: + return [task_key] + list(subtask_path) + # line is not part of this nested tasks block + return [] + + +class OctalIntYAML11(ScalarInt): + """OctalInt representation for YAML 1.1.""" + + # tell mypy that ScalarInt has these attributes + _width: Any + _underscore: Any + + def __new__(cls, *args: Any, **kwargs: Any) -> Any: + """Create a new int with ScalarInt-defined attributes.""" + return ScalarInt.__new__(cls, *args, **kwargs) + + @staticmethod + def represent_octal(representer: RoundTripRepresenter, data: OctalIntYAML11) -> Any: + """Return a YAML 1.1 octal representation. + + Based on ruamel.yaml.representer.RoundTripRepresenter.represent_octal_int() + (which only handles the YAML 1.2 octal representation). + """ + v = format(data, "o") + anchor = data.yaml_anchor(any=True) + # noinspection PyProtectedMember + # pylint: disable=protected-access + return representer.insert_underscore("0", v, data._underscore, anchor=anchor) + + +class CustomConstructor(RoundTripConstructor): + """Custom YAML constructor that preserves Octal formatting in YAML 1.1.""" + + def construct_yaml_int(self, node: ScalarNode) -> Any: + """Construct int while preserving Octal formatting in YAML 1.1. + + ruamel.yaml only preserves the octal format for YAML 1.2. + For 1.1, it converts the octal to an int. So, we preserve the format. + + Code partially copied from ruamel.yaml (MIT licensed). + """ + ret = super().construct_yaml_int(node) + if self.resolver.processing_version == (1, 1) and isinstance(ret, int): + # Do not rewrite zero as octal. + if ret == 0: + return ret + # see if we've got an octal we need to preserve. + value_su = self.construct_scalar(node) + try: + v = value_su.rstrip("_") + underscore = [len(v) - v.rindex("_") - 1, False, False] # type: Any + except ValueError: + underscore = None + except IndexError: + underscore = None + value_s = value_su.replace("_", "") + if value_s[0] in "+-": + value_s = value_s[1:] + if value_s[0] == "0": + # got an octal in YAML 1.1 + ret = OctalIntYAML11( + ret, width=None, underscore=underscore, anchor=node.anchor + ) + return ret + + +CustomConstructor.add_constructor( + "tag:yaml.org,2002:int", CustomConstructor.construct_yaml_int +) + + +class FormattedEmitter(Emitter): + """Emitter that applies custom formatting rules when dumping YAML. + + Differences from ruamel.yaml defaults: + + - indentation of root-level sequences + - prefer double-quoted scalars over single-quoted scalars + + This ensures that root-level sequences are never indented. + All subsequent levels are indented as configured (normal ruamel.yaml behavior). + + Earlier implementations used dedent on ruamel.yaml's dumped output, + but string magic like that had a ton of problematic edge cases. 
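+
+    As for quoting: a scalar that ruamel.yaml would single-quote is emitted with
+    double quotes instead, unless the value itself contains a double quote.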
+ """ + + preferred_quote = '"' # either " or ' + + min_spaces_inside = 0 + max_spaces_inside = 1 + + _sequence_indent = 2 + _sequence_dash_offset = 0 # Should be _sequence_indent - 2 + _root_is_sequence = False + + _in_empty_flow_map = False + + @property + def _is_root_level_sequence(self) -> bool: + """Return True if this is a sequence at the root level of the yaml document.""" + return self.column < 2 and self._root_is_sequence + + def expect_document_root(self) -> None: + """Expect doc root (extend to record if the root doc is a sequence).""" + self._root_is_sequence = isinstance( + self.event, ruamel.yaml.events.SequenceStartEvent + ) + return super().expect_document_root() + + # NB: mypy does not support overriding attributes with properties yet: + # https://github.com/python/mypy/issues/4125 + # To silence we have to ignore[override] both the @property and the method. + + @property + def best_sequence_indent(self) -> int: + """Return the configured sequence_indent or 2 for root level.""" + return 2 if self._is_root_level_sequence else self._sequence_indent + + @best_sequence_indent.setter + def best_sequence_indent(self, value: int) -> None: + """Configure how many columns to indent each sequence item (including the '-').""" + self._sequence_indent = value + + @property + def sequence_dash_offset(self) -> int: + """Return the configured sequence_dash_offset or 0 for root level.""" + return 0 if self._is_root_level_sequence else self._sequence_dash_offset + + @sequence_dash_offset.setter + def sequence_dash_offset(self, value: int) -> None: + """Configure how many spaces to put before each sequence item's '-'.""" + self._sequence_dash_offset = value + + def choose_scalar_style(self) -> Any: + """Select how to quote scalars if needed.""" + style = super().choose_scalar_style() + if ( + style == "" + and self.event.value.startswith("0") + and len(self.event.value) > 1 + ): + if self.event.tag == "tag:yaml.org,2002:int" and self.event.implicit[0]: + # ensures that "0123" string does not lose its quoting + self.event.tag = "tag:yaml.org,2002:str" + self.event.implicit = (True, True, True) + return '"' + if style != "'": + # block scalar, double quoted, etc. + return style + if '"' in self.event.value: + return "'" + return self.preferred_quote + + def write_indicator( + self, + indicator: str, # ruamel.yaml typehint is wrong. This is a string. + need_whitespace: bool, + whitespace: bool = False, + indention: bool = False, # (sic) ruamel.yaml has this typo in their API + ) -> None: + """Make sure that flow maps get whitespace by the curly braces.""" + # We try to go with one whitespace by the curly braces and adjust accordingly + # to what min_spaces_inside and max_spaces_inside are set to. + # This assumes min_spaces_inside <= max_spaces_inside + spaces_inside = min( + max(1, self.min_spaces_inside), + self.max_spaces_inside if self.max_spaces_inside != -1 else 1, + ) + # If this is the end of the flow mapping that isn't on a new line: + if ( + indicator == "}" + and (self.column or 0) > (self.indent or 0) + and not self._in_empty_flow_map + ): + indicator = (" " * spaces_inside) + "}" + super().write_indicator(indicator, need_whitespace, whitespace, indention) + # if it is the start of a flow mapping, and it's not time + # to wrap the lines, insert a space. 
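+        # Empty flow maps stay as "{}" with no inner padding: the branch below
+        # records that state so the closing "}" handling above skips its padding too.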
+ if indicator == "{" and self.column < self.best_width: + if self.check_empty_mapping(): + self._in_empty_flow_map = True + else: + self.column += 1 + self.stream.write(" " * spaces_inside) + self._in_empty_flow_map = False + + # "/n/n" results in one blank line (end the previous line, then newline). + # So, "/n/n/n" or more is too many new lines. Clean it up. + _re_repeat_blank_lines: Pattern[str] = re.compile(r"\n{3,}") + + @staticmethod + def add_octothorpe_protection(string: str) -> str: + """Modify strings to protect "#" from full-line-comment post-processing.""" + try: + if "#" in string: + # # is \uFF03 (fullwidth number sign) + # ﹟ is \uFE5F (small number sign) + string = string.replace("#", "\uFF03#\uFE5F") + # this is safe even if this sequence is present + # because it gets reversed in post-processing + except (ValueError, TypeError): + # probably not really a string. Whatever. + pass + return string + + @staticmethod + def drop_octothorpe_protection(string: str) -> str: + """Remove string protection of "#" after full-line-comment post-processing.""" + try: + if "\uFF03#\uFE5F" in string: + # # is \uFF03 (fullwidth number sign) + # ﹟ is \uFE5F (small number sign) + string = string.replace("\uFF03#\uFE5F", "#") + except (ValueError, TypeError): + # probably not really a string. Whatever. + pass + return string + + def analyze_scalar(self, scalar: str) -> ScalarAnalysis: + """Determine quoting and other requirements for string. + + And protect "#" from full-line-comment post-processing. + """ + analysis: ScalarAnalysis = super().analyze_scalar(scalar) + if analysis.empty: + return analysis + analysis.scalar = self.add_octothorpe_protection(analysis.scalar) + return analysis + + # comment is a CommentToken, not Any (Any is ruamel.yaml's lazy type hint). + def write_comment(self, comment: CommentToken, pre: bool = False) -> None: + """Clean up extra new lines and spaces in comments. + + ruamel.yaml treats new or empty lines as comments. + See: https://stackoverflow.com/questions/42708668/removing-all-blank-lines-but-not-comments-in-ruamel-yaml/42712747#42712747 + """ + value: str = comment.value + if ( + pre + and not value.strip() + and not isinstance( + self.event, + ( + ruamel.yaml.events.CollectionEndEvent, + ruamel.yaml.events.DocumentEndEvent, + ruamel.yaml.events.StreamEndEvent, + ), + ) + ): + # drop pure whitespace pre comments + # does not apply to End events since they consume one of the newlines. + value = "" + elif pre: + # preserve content in pre comment with at least one newline, + # but no extra blank lines. + value = self._re_repeat_blank_lines.sub("\n", value) + else: + # single blank lines in post comments + value = self._re_repeat_blank_lines.sub("\n\n", value) + comment.value = value + + # make sure that the eol comment only has one space before it. + if comment.column > self.column + 1 and not pre: + comment.column = self.column + 1 + + return super().write_comment(comment, pre) + + def write_version_directive(self, version_text: Any) -> None: + """Skip writing '%YAML 1.1'.""" + if version_text == "1.1": + return + super().write_version_directive(version_text) + + +# pylint: disable=too-many-instance-attributes +class FormattedYAML(YAML): + """A YAML loader/dumper that handles ansible content better by default.""" + + def __init__( + self, + *, + typ: str | None = None, + pure: bool = False, + output: Any = None, + # input: Any = None, + plug_ins: list[str] | None = None, + ): + """Return a configured ``ruamel.yaml.YAML`` instance. 
+ + Some config defaults get extracted from the yamllint config. + + ``ruamel.yaml.YAML`` uses attributes to configure how it dumps yaml files. + Some of these settings can be confusing, so here are examples of how different + settings will affect the dumped yaml. + + This example does not indent any sequences: + + .. code:: python + + yaml.explicit_start=True + yaml.map_indent=2 + yaml.sequence_indent=2 + yaml.sequence_dash_offset=0 + + .. code:: yaml + + --- + - name: A playbook + tasks: + - name: Task + + This example indents all sequences including the root-level: + + .. code:: python + + yaml.explicit_start=True + yaml.map_indent=2 + yaml.sequence_indent=4 + yaml.sequence_dash_offset=2 + # yaml.Emitter defaults to ruamel.yaml.emitter.Emitter + + .. code:: yaml + + --- + - name: Playbook + tasks: + - name: Task + + This example indents all sequences except at the root-level: + + .. code:: python + + yaml.explicit_start=True + yaml.map_indent=2 + yaml.sequence_indent=4 + yaml.sequence_dash_offset=2 + yaml.Emitter = FormattedEmitter # custom Emitter prevents root-level indents + + .. code:: yaml + + --- + - name: Playbook + tasks: + - name: Task + """ + # Default to reading/dumping YAML 1.1 (ruamel.yaml defaults to 1.2) + self._yaml_version_default: tuple[int, int] = (1, 1) + self._yaml_version: str | tuple[int, int] = self._yaml_version_default + + super().__init__(typ=typ, pure=pure, output=output, plug_ins=plug_ins) + + # NB: We ignore some mypy issues because ruamel.yaml typehints are not great. + + config = self._defaults_from_yamllint_config() + + # these settings are derived from yamllint config + self.explicit_start: bool = config["explicit_start"] # type: ignore[assignment] + self.explicit_end: bool = config["explicit_end"] # type: ignore[assignment] + self.width: int = config["width"] # type: ignore[assignment] + indent_sequences: bool = cast(bool, config["indent_sequences"]) + preferred_quote: str = cast(str, config["preferred_quote"]) # either ' or " + + min_spaces_inside: int = cast(int, config["min_spaces_inside"]) + max_spaces_inside: int = cast(int, config["max_spaces_inside"]) + + self.default_flow_style = False + self.compact_seq_seq = True # type: ignore[assignment] # dash after dash + self.compact_seq_map = True # type: ignore[assignment] # key after dash + + # Do not use yaml.indent() as it obscures the purpose of these vars: + self.map_indent = 2 # type: ignore[assignment] + self.sequence_indent = 4 if indent_sequences else 2 # type: ignore[assignment] + self.sequence_dash_offset = self.sequence_indent - 2 # type: ignore[operator] + + # If someone doesn't want our FormattedEmitter, they can change it. + self.Emitter = FormattedEmitter + + # ignore invalid preferred_quote setting + if preferred_quote in ['"', "'"]: + FormattedEmitter.preferred_quote = preferred_quote + # NB: default_style affects preferred_quote as well. + # self.default_style ∈ None (default), '', '"', "'", '|', '>' + + # spaces inside braces for flow mappings + FormattedEmitter.min_spaces_inside = min_spaces_inside + FormattedEmitter.max_spaces_inside = max_spaces_inside + + # We need a custom constructor to preserve Octal formatting in YAML 1.1 + self.Constructor = CustomConstructor + self.Representer.add_representer(OctalIntYAML11, OctalIntYAML11.represent_octal) + + # We should preserve_quotes loads all strings as a str subclass that carries + # a quote attribute. Will the str subclasses cause problems in transforms? + # Are there any other gotchas to this? 
+ # + # This will only preserve quotes for strings read from the file. + # anything modified by the transform will use no quotes, preferred_quote, + # or the quote that results in the least amount of escaping. + # self.preserve_quotes = True + + # If needed, we can use this to change null representation to be explicit + # (see https://stackoverflow.com/a/44314840/1134951) + # self.Representer.add_representer( + # type(None), + # lambda self, data: self.represent_scalar("tag:yaml.org,2002:null", "null"), + # ) + + @staticmethod + def _defaults_from_yamllint_config() -> dict[str, bool | int | str]: + """Extract FormattedYAML-relevant settings from yamllint config if possible.""" + config = { + "explicit_start": True, + "explicit_end": False, + "width": 160, + "indent_sequences": True, + "preferred_quote": '"', + "min_spaces_inside": 0, + "max_spaces_inside": 1, + } + for rule, rule_config in load_yamllint_config().rules.items(): + if not rule_config: + # rule disabled + continue + + # refactor this if ... elif ... elif ... else monstrosity using match/case (PEP 634) once python 3.10 is mandatory + if rule == "document-start": + config["explicit_start"] = rule_config["present"] + elif rule == "document-end": + config["explicit_end"] = rule_config["present"] + elif rule == "line-length": + config["width"] = rule_config["max"] + elif rule == "braces": + min_spaces_inside = rule_config["min-spaces-inside"] + if min_spaces_inside: + config["min_spaces_inside"] = int(min_spaces_inside) + max_spaces_inside = rule_config["max-spaces-inside"] + if max_spaces_inside: + config["max_spaces_inside"] = int(max_spaces_inside) + elif rule == "indentation": + indent_sequences = rule_config["indent-sequences"] + # one of: bool, "whatever", "consistent" + # so, we use True for "whatever" and "consistent" + config["indent_sequences"] = bool(indent_sequences) + elif rule == "quoted-strings": + quote_type = rule_config["quote-type"] + # one of: single, double, any + if quote_type == "single": + config["preferred_quote"] = "'" + elif quote_type == "double": + config["preferred_quote"] = '"' + + return cast(Dict[str, Union[bool, int, str]], config) + + @property # type: ignore[override] + def version(self) -> str | tuple[int, int]: + """Return the YAML version used to parse or dump. + + Ansible uses PyYAML which only supports YAML 1.1. ruamel.yaml defaults to 1.2. + So, we have to make sure we dump yaml files using YAML 1.1. + We can relax the version requirement once ansible uses a version of PyYAML + that includes this PR: https://github.com/yaml/pyyaml/pull/555 + """ + return self._yaml_version + + @version.setter + def version(self, value: str | tuple[int, int] | None) -> None: + """Ensure that yaml version uses our default value. + + The yaml Reader updates this value based on the ``%YAML`` directive in files. + So, if a file does not include the directive, it sets this to None. + But, None effectively resets the parsing version to YAML 1.2 (ruamel's default). 
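+        To keep parsing consistent with Ansible's PyYAML-based loading (YAML 1.1
+        only), we fall back to our 1.1 default whenever the Reader hands us None.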
+ """ + self._yaml_version = value if value is not None else self._yaml_version_default + + def loads(self, stream: str) -> Any: + """Load YAML content from a string while avoiding known ruamel.yaml issues.""" + if not isinstance(stream, str): + raise NotImplementedError(f"expected a str but got {type(stream)}") + text, preamble_comment = self._pre_process_yaml(stream) + data = self.load(stream=text) + if preamble_comment is not None: + setattr(data, "preamble_comment", preamble_comment) + return data + + def dumps(self, data: Any) -> str: + """Dump YAML document to string (including its preamble_comment).""" + preamble_comment: str | None = getattr(data, "preamble_comment", None) + self._prevent_wrapping_flow_style(data) + with StringIO() as stream: + if preamble_comment: + stream.write(preamble_comment) + self.dump(data, stream) + text = stream.getvalue() + return self._post_process_yaml(text) + + def _prevent_wrapping_flow_style(self, data: Any) -> None: + if not isinstance(data, (CommentedMap, CommentedSeq)): + return + for key, value, parent_path in nested_items_path(data): + if not isinstance(value, (CommentedMap, CommentedSeq)): + continue + fa: Format = value.fa # pylint: disable=invalid-name + if fa.flow_style(): + predicted_indent = self._predict_indent_length(parent_path, key) + predicted_width = len(str(value)) + if predicted_indent + predicted_width > self.width: + # this flow-style map will probably get line-wrapped, + # so, switch it to block style to avoid the line wrap. + fa.set_block_style() + + def _predict_indent_length(self, parent_path: list[str | int], key: Any) -> int: + indent = 0 + + # each parent_key type tells us what the indent is for the next level. + for parent_key in parent_path: + if isinstance(parent_key, int) and indent == 0: + # root level is a sequence + indent += self.sequence_dash_offset + elif isinstance(parent_key, int): + # next level is a sequence + indent += cast(int, self.sequence_indent) + elif isinstance(parent_key, str): + # next level is a map + indent += cast(int, self.map_indent) + + if isinstance(key, int) and indent == 0: + # flow map is an item in a root-level sequence + indent += self.sequence_dash_offset + elif isinstance(key, int) and indent > 0: + # flow map is in a sequence + indent += cast(int, self.sequence_indent) + elif isinstance(key, str): + # flow map is in a map + indent += len(key + ": ") + + return indent + + # ruamel.yaml only preserves empty (no whitespace) blank lines + # (ie "/n/n" becomes "/n/n" but "/n /n" becomes "/n"). + # So, we need to identify whitespace-only lines to drop spaces before reading. + _whitespace_only_lines_re = re.compile(r"^ +$", re.MULTILINE) + + def _pre_process_yaml(self, text: str) -> tuple[str, str | None]: + """Handle known issues with ruamel.yaml loading. + + Preserve blank lines despite extra whitespace. + Preserve any preamble (aka header) comments before "---". + + For more on preamble comments, see: https://stackoverflow.com/questions/70286108/python-ruamel-yaml-package-how-to-get-header-comment-lines/70287507#70287507 + """ + text = self._whitespace_only_lines_re.sub("", text) + + # I investigated extending ruamel.yaml to capture preamble comments. 
+ # preamble comment goes from: + # DocumentStartToken.comment -> DocumentStartEvent.comment + # Then, in the composer: + # once in composer.current_event + # composer.compose_document() + # discards DocumentStartEvent + # move DocumentStartEvent to composer.last_event + # node = composer.compose_node(None, None) + # all document nodes get composed (events get used) + # discard DocumentEndEvent + # move DocumentEndEvent to composer.last_event + # return node + # So, there's no convenient way to extend the composer + # to somehow capture the comments and pass them on. + + preamble_comments = [] + if "\n---\n" not in text and "\n--- " not in text: + # nothing is before the document start mark, + # so there are no comments to preserve. + return text, None + for line in text.splitlines(True): + # We only need to capture the preamble comments. No need to remove them. + # lines might also include directives. + if line.lstrip().startswith("#") or line == "\n": + preamble_comments.append(line) + elif line.startswith("---"): + break + + return text, "".join(preamble_comments) or None + + @staticmethod + def _post_process_yaml(text: str) -> str: + """Handle known issues with ruamel.yaml dumping. + + Make sure there's only one newline at the end of the file. + + Fix the indent of full-line comments to match the indent of the next line. + See: https://stackoverflow.com/questions/71354698/how-can-i-use-the-ruamel-yaml-rtsc-mode/71355688#71355688 + Also, removes "#" protection from strings that prevents them from being + identified as full line comments in post-processing. + + Make sure null list items don't end in a space. + """ + text = text.rstrip("\n") + "\n" + + lines = text.splitlines(keepends=True) + full_line_comments: list[tuple[int, str]] = [] + for i, line in enumerate(lines): + stripped = line.lstrip() + if not stripped: + # blank line. Move on. + continue + + space_length = len(line) - len(stripped) + + if stripped.startswith("#"): + # got a full line comment + + # allow some full line comments to match the previous indent + if i > 0 and not full_line_comments and space_length: + prev = lines[i - 1] + prev_space_length = len(prev) - len(prev.lstrip()) + if prev_space_length == space_length: + # if the indent matches the previous line's indent, skip it. + continue + + full_line_comments.append((i, stripped)) + elif full_line_comments: + # end of full line comments so adjust to match indent of this line + spaces = " " * space_length + for index, comment in full_line_comments: + lines[index] = spaces + comment + full_line_comments.clear() + + cleaned = line.strip() + if not cleaned.startswith("#") and cleaned.endswith("-"): + # got an empty list item. drop any trailing spaces. + lines[i] = line.rstrip() + "\n" + + text = "".join( + FormattedEmitter.drop_octothorpe_protection(line) for line in lines + ) + return text + + +def clean_json( + obj: Any, + func: Callable[[str], Any] = lambda key: key.startswith("__") + if isinstance(key, str) + else False, +) -> Any: + """ + Remove all keys matching the condition from a nested JSON-like object. + + :param obj: a JSON like object to clean, also returned for chaining. 
+
+    :param func: a callable that takes a key as its argument and returns True for each key to delete
+    """
+    if isinstance(obj, dict):
+        for key in list(obj.keys()):
+            if func(key):
+                del obj[key]
+            else:
+                clean_json(obj[key], func)
+    elif isinstance(obj, list):
+        for i in reversed(range(len(obj))):
+            if func(obj[i]):
+                del obj[i]
+            else:
+                clean_json(obj[i], func)
+    else:
+        # neither a dict nor a list, do nothing
+        pass
+    return obj
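+
+
+# Example usage (illustrative sketch, not part of this module): with the default
+# predicate, clean_json() strips ansible-lint's internal bookkeeping keys in place
+# and returns the same object for chaining:
+#
+#   tasks = [{"name": "demo", "__line__": 3, "__file__": "site.yml"}]
+#   clean_json(tasks)   # -> [{"name": "demo"}]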